repo_name stringlengths 5-100 | ref stringlengths 12-67 | path stringlengths 4-244 | copies stringlengths 1-8 | content stringlengths 0-1.05M (⌀ = null)
---|---|---|---|---
opendroid-Team/enigma2-4.1 | refs/heads/master | lib/python/Components/Renderer/WatchesNobile.py | 5 | ## Now FTA-render ;)
import math
from Renderer import Renderer
from skin import parseColor
from enigma import eCanvas, eSize, gRGB, eRect
from Components.VariableText import VariableText
from Components.config import config
class WatchesNobile(Renderer):
def __init__(self):
Renderer.__init__(self)
self.fColor = gRGB(255, 255, 255, 0)
self.bColor = gRGB(0, 0, 0, 255)
self.numval = -1
GUI_WIDGET = eCanvas
def applySkin(self, desktop, parent):
attribs = []
for (attrib, what,) in self.skinAttributes:
if (attrib == 'foregroundColor'):
self.fColor = parseColor(what)
elif (attrib == 'backgroundColor'):
self.bColor = parseColor(what)
else:
attribs.append((attrib, what))
self.skinAttributes = attribs
return Renderer.applySkin(self, desktop, parent)
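    # calculate() maps a watch value to the hand's endpoint: each unit is 6
    # degrees (60 units per full revolution), swept clockwise from 12 o'clock
    # on a circle of radius r centred at (m, m).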
def calculate(self, w, r, m):
a = (w * 6)
z = (math.pi / 180)
x = int(round((r * math.sin((a * z)))))
y = int(round((r * math.cos((a * z)))))
return ((m + x),(m - y))
def hand(self):
width = self.instance.size().width()
height = self.instance.size().height()
r = (min(width, height) / 2)
(endX, endY,) = self.calculate(self.numval, r, r)
self.draw_line(r, r, endX, endY)
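    # draw_line() is Bresenham's line algorithm: step along the major axis,
    # accumulate the error term, and bump the minor axis when it overflows,
    # painting a 2x2 rectangle at each step.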
def draw_line(self, x0, y0, x1, y1):
steep = (abs((y1 - y0)) > abs((x1 - x0)))
if steep:
x0,y0 = y0,x0
x1,y1 = y1,x1
if (x0 > x1):
x0,x1 = x1,x0
y0,y1 = y1,y0
if (y0 < y1):
ystep = 1
else:
ystep = -1
deltax = (x1 - x0)
deltay = abs((y1 - y0))
error = (-deltax / 2)
y = y0
for x in range(x0, (x1 + 1)):
if steep:
self.instance.fillRect(eRect(y, x, 2, 2), self.fColor)
else:
self.instance.fillRect(eRect(x, y, 2, 2), self.fColor)
error = (error + deltay)
if (error > 0):
y = (y + ystep)
error = (error - deltax)
def changed(self, what):
sss = self.source.value
if (what[0] == self.CHANGED_CLEAR):
pass
elif self.instance:
self.instance.show()
if (self.numval != sss):
self.numval = sss
self.instance.clear(self.bColor)
self.hand()
def postWidgetCreate(self, instance):
        def parseSize(s):
            (x, y) = s.split(',')
            return eSize(int(x), int(y))
for (attrib, value,) in self.skinAttributes:
            if attrib == 'size':
                self.instance.setSize(parseSize(value))
self.instance.clear(self.bColor)
|
justajeffy/arsenalsuite | refs/heads/master | cpp/lib/PyQt4/examples/tools/regexp.py | 20 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
# This is only needed for Python v2 but is harmless for Python v3.
import sip
sip.setapi('QVariant', 2)
from PyQt4 import QtCore, QtGui
class RegExpDialog(QtGui.QDialog):
MaxCaptures = 6
def __init__(self, parent=None):
super(RegExpDialog, self).__init__(parent)
self.patternComboBox = QtGui.QComboBox()
self.patternComboBox.setEditable(True)
self.patternComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Preferred)
patternLabel = QtGui.QLabel("&Pattern:")
patternLabel.setBuddy(self.patternComboBox)
self.escapedPatternLineEdit = QtGui.QLineEdit()
self.escapedPatternLineEdit.setReadOnly(True)
palette = self.escapedPatternLineEdit.palette()
palette.setBrush(QtGui.QPalette.Base,
palette.brush(QtGui.QPalette.Disabled, QtGui.QPalette.Base))
self.escapedPatternLineEdit.setPalette(palette)
escapedPatternLabel = QtGui.QLabel("&Escaped Pattern:")
escapedPatternLabel.setBuddy(self.escapedPatternLineEdit)
self.syntaxComboBox = QtGui.QComboBox()
self.syntaxComboBox.addItem("Regular expression v1",
QtCore.QRegExp.RegExp)
self.syntaxComboBox.addItem("Regular expression v2",
QtCore.QRegExp.RegExp2)
self.syntaxComboBox.addItem("Wildcard", QtCore.QRegExp.Wildcard)
self.syntaxComboBox.addItem("Fixed string",
QtCore.QRegExp.FixedString)
syntaxLabel = QtGui.QLabel("&Pattern Syntax:")
syntaxLabel.setBuddy(self.syntaxComboBox)
self.textComboBox = QtGui.QComboBox()
self.textComboBox.setEditable(True)
self.textComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Preferred)
textLabel = QtGui.QLabel("&Text:")
textLabel.setBuddy(self.textComboBox)
self.caseSensitiveCheckBox = QtGui.QCheckBox("Case &Sensitive")
self.caseSensitiveCheckBox.setChecked(True)
self.minimalCheckBox = QtGui.QCheckBox("&Minimal")
indexLabel = QtGui.QLabel("Index of Match:")
self.indexEdit = QtGui.QLineEdit()
self.indexEdit.setReadOnly(True)
matchedLengthLabel = QtGui.QLabel("Matched Length:")
self.matchedLengthEdit = QtGui.QLineEdit()
self.matchedLengthEdit.setReadOnly(True)
self.captureLabels = []
self.captureEdits = []
for i in range(self.MaxCaptures):
self.captureLabels.append(QtGui.QLabel("Capture %d:" % i))
self.captureEdits.append(QtGui.QLineEdit())
self.captureEdits[i].setReadOnly(True)
self.captureLabels[0].setText("Match:")
checkBoxLayout = QtGui.QHBoxLayout()
checkBoxLayout.addWidget(self.caseSensitiveCheckBox)
checkBoxLayout.addWidget(self.minimalCheckBox)
checkBoxLayout.addStretch(1)
mainLayout = QtGui.QGridLayout()
mainLayout.addWidget(patternLabel, 0, 0)
mainLayout.addWidget(self.patternComboBox, 0, 1)
mainLayout.addWidget(escapedPatternLabel, 1, 0)
mainLayout.addWidget(self.escapedPatternLineEdit, 1, 1)
mainLayout.addWidget(syntaxLabel, 2, 0)
mainLayout.addWidget(self.syntaxComboBox, 2, 1)
mainLayout.addLayout(checkBoxLayout, 3, 0, 1, 2)
mainLayout.addWidget(textLabel, 4, 0)
mainLayout.addWidget(self.textComboBox, 4, 1)
mainLayout.addWidget(indexLabel, 5, 0)
mainLayout.addWidget(self.indexEdit, 5, 1)
mainLayout.addWidget(matchedLengthLabel, 6, 0)
mainLayout.addWidget(self.matchedLengthEdit, 6, 1)
for i in range(self.MaxCaptures):
mainLayout.addWidget(self.captureLabels[i], 7 + i, 0)
mainLayout.addWidget(self.captureEdits[i], 7 + i, 1)
self.setLayout(mainLayout)
self.patternComboBox.editTextChanged.connect(self.refresh)
self.textComboBox.editTextChanged.connect(self.refresh)
self.caseSensitiveCheckBox.toggled.connect(self.refresh)
self.minimalCheckBox.toggled.connect(self.refresh)
self.syntaxComboBox.currentIndexChanged.connect(self.refresh)
self.patternComboBox.addItem("[A-Za-z_]+([A-Za-z_0-9]*)")
self.textComboBox.addItem("(10 + delta4)* 32")
self.setWindowTitle("RegExp")
self.setFixedHeight(self.sizeHint().height())
self.refresh()
def refresh(self):
self.setUpdatesEnabled(False)
pattern = self.patternComboBox.currentText()
text = self.textComboBox.currentText()
        escaped = str(pattern)
        # str.replace() returns a new string, so the result must be kept.
        escaped = escaped.replace('\\', '\\\\')
        escaped = escaped.replace('"', '\\"')
self.escapedPatternLineEdit.setText('"' + escaped + '"')
rx = QtCore.QRegExp(pattern)
cs = QtCore.Qt.CaseInsensitive
if self.caseSensitiveCheckBox.isChecked():
cs = QtCore.Qt.CaseSensitive
rx.setCaseSensitivity(cs)
rx.setMinimal(self.minimalCheckBox.isChecked())
syntax = self.syntaxComboBox.itemData(self.syntaxComboBox.currentIndex())
rx.setPatternSyntax(syntax)
palette = self.patternComboBox.palette()
if rx.isValid():
palette.setColor(QtGui.QPalette.Text,
self.textComboBox.palette().color(QtGui.QPalette.Text))
else:
palette.setColor(QtGui.QPalette.Text, QtCore.Qt.red)
self.patternComboBox.setPalette(palette)
self.indexEdit.setText(str(rx.indexIn(text)))
self.matchedLengthEdit.setText(str(rx.matchedLength()))
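        # cap(0) is the full match, so capture indices 0..numCaptures() are
        # all meaningful; label 0 was retitled "Match:" in __init__.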
for i in range(self.MaxCaptures):
self.captureLabels[i].setEnabled(i <= rx.numCaptures())
self.captureEdits[i].setEnabled(i <= rx.numCaptures())
self.captureEdits[i].setText(rx.cap(i))
self.setUpdatesEnabled(True)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
dialog = RegExpDialog()
sys.exit(dialog.exec_())
|
emesene/emesene | refs/heads/master | emesene/e3/papylib/papyon/papyon/sip/extensions/ms_conversation_id.py | 6 | # -*- coding: utf-8 -*-
#
# papyon - a Python client library for MSN
#
# Copyright (C) 2010 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.sip.extensions.base import SIPExtension
class MSConversationIDExtension(SIPExtension):
def __init__(self, client, core):
SIPExtension.__init__(self, client, core)
def extend_request(self, message):
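        # Flag the call type for the peer: f=1 when the media session carries
        # video, f=0 for audio-only.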
call = self._client.call_manager.find_call(message)
if call is not None and call.media_session.has_video:
conversation_id = 1
else:
conversation_id = 0
message.add_header("Ms-Conversation-ID", "f=%s" % conversation_id)
|
Daniel-CA/odoo-addons | refs/heads/8.0 | account_analytic_analysis_recurring_day/models/sale_order.py | 4 | # -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, api, exceptions, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.multi
def action_button_confirm(self):
res = super(SaleOrder, self).action_button_confirm()
for sale in self:
if (sale.project_id and sale.project_id.recurring_invoices and
sale.project_id.recurring_rule_type == 'monthly'):
if (not sale.project_id.recurring_first_day and not
sale.project_id.recurring_last_day and not
sale.project_id.recurring_the_day):
raise exceptions.Warning(
                    _('In the sale order %s, with contract %s, you must'
                      ' indicate on which day the next invoice will be'
                      ' generated')
% (sale.name, sale.project_id.name))
return res
|
madjam/mxnet | refs/heads/master | example/reinforcement-learning/parallel_actor_critic/config.py | 52 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
class Config(object):
def __init__(self, args):
# Default training settings
self.ctx = mx.gpu(0) if args.gpu else mx.cpu()
self.init_func = mx.init.Xavier(rnd_type='uniform', factor_type="in",
magnitude=1)
self.learning_rate = 1e-3
self.update_rule = "adam"
self.grad_clip = True
self.clip_magnitude = 40
# Default model settings
self.hidden_size = 200
self.gamma = 0.99
self.lambda_ = 1.0
self.vf_wt = 0.5 # Weight of value function term in the loss
self.entropy_wt = 0.01 # Weight of entropy term in the loss
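        # These weights suggest a combined objective of roughly:
        #   loss = policy_loss + vf_wt * value_loss - entropy_wt * entropy
        # (a sketch -- the exact expression lives in the training code).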
self.num_envs = 16
self.t_max = 50
# Override defaults with values from `args`.
for arg in self.__dict__:
if arg in args.__dict__:
self.__setattr__(arg, args.__dict__[arg])
|
papouso/odoo | refs/heads/8.0 | addons/account_followup/__openerp__.py | 261 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Payment Follow-up Management',
'version': '1.0',
'category': 'Accounting & Finance',
'description': """
Module to automate letters for unpaid invoices, with multi-level recalls.
=========================================================================
You can define your multiple levels of recall through the menu:
---------------------------------------------------------------
Configuration / Follow-up / Follow-up Levels
Once it is defined, you can automatically print recalls every day by simply clicking on the menu:
------------------------------------------------------------------------------------------------------
Payment Follow-Up / Send Email and letters
It will generate a PDF / send emails / set manual actions according to the different levels
of recall defined. You can define different policies for different companies.
Note that if you want to check the follow-up level for a given partner/account entry, you can do so from the menu:
------------------------------------------------------------------------------------------------------------------
Reporting / Accounting / Follow-ups Analysis
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/billing',
'depends': ['account_accountant', 'mail'],
'data': [
'security/account_followup_security.xml',
'security/ir.model.access.csv',
'report/account_followup_report.xml',
'account_followup_data.xml',
'account_followup_view.xml',
'account_followup_customers.xml',
'wizard/account_followup_print_view.xml',
'res_config_view.xml',
'views/report_followup.xml',
'account_followup_reports.xml'
],
'demo': ['account_followup_demo.xml'],
'test': [
'test/account_followup.yml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rahmalik/trafficserver | refs/heads/master | tests/tools/traffic-replay/SSLReplay.py | 5 | #!/bin/env python3
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http.client
import socket
import ssl
import pprint
import requests
import os
#import threading
import sys
from multiprocessing import current_process
import sessionvalidation.sessionvalidation as sv
import lib.result as result
import extractHeader
from threading import Thread
import mainProcess
import json
import time
import Config
bSTOP = False
class ProxyHTTPSConnection(http.client.HTTPSConnection):
"This class allows communication via SSL."
default_port = http.client.HTTPS_PORT
# XXX Should key_file and cert_file be deprecated in favour of context?
def __init__(self, host, port=None, key_file=None, cert_file=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, *, context=None,
check_hostname=None, server_name=None):
# http.client.HTTPSConnection.__init__(self)
super().__init__(host, port, key_file, cert_file, timeout, source_address, context=context, check_hostname=check_hostname)
'''
self.key_file = key_file
self.cert_file = cert_file
if context is None:
context = ssl._create_default_https_context()
will_verify = context.verify_mode != ssl.CERT_NONE
if check_hostname is None:
check_hostname = context.check_hostname
if check_hostname and not will_verify:
raise ValueError("check_hostname needs a SSL context with "
"either CERT_OPTIONAL or CERT_REQUIRED")
if key_file or cert_file:
context.load_cert_chain(cert_file, key_file)
self._context = context
self._check_hostname = check_hostname
'''
self.server_name = server_name
def connect(self):
"Connect to a host on a given (SSL) port."
http.client.HTTPConnection.connect(self)
if self._tunnel_host:
server_hostname = self._tunnel_host
else:
server_hostname = self.server_name
self.sock = self._context.wrap_socket(self.sock,
do_handshake_on_connect=True,
server_side=False,
server_hostname=server_hostname)
if not self._context.check_hostname and self._check_hostname:
try:
ssl.match_hostname(self.sock.getpeercert(), server_hostname)
except Exception:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
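# A ProxyHTTPSConnection connects to the proxy's address but handshakes with
# the origin's hostname via SNI. A minimal sketch (hostname illustrative):
#   sc = ssl.SSLContext(protocol=ssl.PROTOCOL_SSLv23)
#   conn = ProxyHTTPSConnection(Config.proxy_host, Config.proxy_ssl_port,
#                               context=sc, server_name='origin.example.com')
#   conn.request('GET', 'https://origin.example.com/')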
def txn_replay(session_filename, txn, proxy, result_queue, request_session):
""" Replays a single transaction
    :param request_session: an open ProxyHTTPSConnection for this session"""
req = txn.getRequest()
resp = txn.getResponse()
responseDict = {}
# Construct HTTP request & fire it off
txn_req_headers = req.getHeaders()
txn_req_headers_dict = extractHeader.header_to_dict(txn_req_headers)
txn_req_headers_dict['Content-MD5'] = txn._uuid # used as unique identifier
if 'body' in txn_req_headers_dict:
del txn_req_headers_dict['body']
#print("Replaying session")
try:
# response = request_session.request(extractHeader.extract_txn_req_method(txn_req_headers),
# 'http://' + extractHeader.extract_host(txn_req_headers) + extractHeader.extract_GET_path(txn_req_headers),
# headers=txn_req_headers_dict,stream=False) # making stream=False raises contentdecoding exception? kill me
method = extractHeader.extract_txn_req_method(txn_req_headers)
response = None
body = None
content = None
if 'Transfer-Encoding' in txn_req_headers_dict:
            # Delete the Host header: the post/get helpers add their own Host
            # field, so keeping ours would duplicate it and ATS rejects the
            # request with 400 "Invalid HTTP request". This only matters when
            # the body is chunk-encoded.
del txn_req_headers_dict['Host']
if 'Content-Length' in txn_req_headers_dict:
#print("ewww !")
del txn_req_headers_dict['Content-Length']
body = gen()
if 'Content-Length' in txn_req_headers_dict:
nBytes = int(txn_req_headers_dict['Content-Length'])
body = createDummyBodywithLength(nBytes)
#print("request session is",id(request_session))
if method == 'GET':
request_session.request('GET', 'https://' + extractHeader.extract_host(txn_req_headers) + extractHeader.extract_GET_path(txn_req_headers),
headers=txn_req_headers_dict, body=body)
r1 = request_session.getresponse()
responseHeaders = r1.getheaders()
responseContent = r1.read()
elif method == 'POST':
request_session.request('POST', 'https://' + extractHeader.extract_host(txn_req_headers) + extractHeader.extract_GET_path(txn_req_headers),
headers=txn_req_headers_dict, body=body)
r1 = request_session.getresponse()
responseHeaders = r1.getheaders()
responseContent = r1.read()
elif method == 'HEAD':
request_session.request('HEAD', 'https://' + extractHeader.extract_host(txn_req_headers) + extractHeader.extract_GET_path(txn_req_headers),
headers=txn_req_headers_dict, body=body)
r1 = request_session.getresponse()
responseHeaders = r1.getheaders()
responseContent = r1.read()
for key, value in responseHeaders:
responseDict[key.lower()] = value
expected = extractHeader.responseHeader_to_dict(resp.getHeaders())
# print(responseDict)
if mainProcess.verbose:
expected_output_split = resp.getHeaders().split('\r\n')[0].split(' ', 2)
expected_output = (int(expected_output_split[1]), str(expected_output_split[2]))
r = result.Result(session_filename, expected_output[0], r1.status)
print(r.getResultString(responseDict, expected, colorize=True))
r.Compare(responseDict, expected)
# result_queue.put(r)
except UnicodeEncodeError as e:
# these unicode errors are due to the interaction between Requests and our wiretrace data.
# TODO fix
print("UnicodeEncodeError exception")
except requests.exceptions.ContentDecodingError as e:
print("ContentDecodingError", e)
except:
e = sys.exc_info()
print("ERROR in requests: ", e, response, session_filename)
def client_replay(input, proxy, result_queue, nThread):
Threads = []
for i in range(nThread):
t = Thread(target=session_replay, args=[input, proxy, result_queue])
t.start()
Threads.append(t)
for t1 in Threads:
t1.join()
def session_replay(input, proxy, result_queue):
''' Replay all transactions in session
This entire session will be replayed in one requests.Session (so one socket / TCP connection)'''
# if timing_control:
# time.sleep(float(session._timestamp)) # allow other threads to run
global bSTOP
sslSocks = []
    while not bSTOP:
for session in iter(input.get, 'STOP'):
txn = session.returnFirstTransaction()
req = txn.getRequest()
# Construct HTTP request & fire it off
txn_req_headers = req.getHeaders()
txn_req_headers_dict = extractHeader.header_to_dict(txn_req_headers)
sc = ssl.SSLContext(protocol=ssl.PROTOCOL_SSLv23)
            if Config.ca_certs is not None and Config.keyfile is not None:
sc.load_cert_chain(Config.ca_certs, keyfile=Config.keyfile)
conn = ProxyHTTPSConnection(Config.proxy_host, Config.proxy_ssl_port, cert_file=Config.ca_certs,
key_file=Config.keyfile, context=sc, server_name=txn_req_headers_dict['Host'])
for txn in session.getTransactionIter():
try:
# print(txn._uuid)
txn_replay(session._filename, txn, proxy, result_queue, conn)
except:
e = sys.exc_info()
print("ERROR in replaying: ", e, txn.getRequest().getHeaders())
#sslSocket.bStop = False
bSTOP = True
#print("stopping now")
input.put('STOP')
break
# time.sleep(0.5)
for sslSock in sslSocks:
sslSock.ssl_sock.close()
|
beastwood/synctity | refs/heads/master | rsync.py | 1 | '''
Copyright 2009, 2010 Brian S. Eastwood.
This file is part of Synctus.
Synctus is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Synctus is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Synctus. If not, see <http://www.gnu.org/licenses/>.
Created on Nov 9, 2009
'''
import commands
class Path:
'''
Represents a fully qualified network path name as recognized by rsync.
This includes a user, host, and path. An rsync path is printed as:
user@host:path.
'''
def __init__(self, path='', host='', user=''):
''' Initialize a network path '''
self.user = user
self.host = host
self.path = path
def __str__(self):
''' Build the full network path string'''
        result = ''
        if self.user != '':
            result = result + self.user + "@"
        if self.host != '':
            result = result + self.host + ":"
        elif self.user != '':
            result = result + "localhost:"
        result = result + self.path
        if result == '':
            result = '.'
        return result
def getUser(self): return self.user
def setUser(self, value): self.user = value
def getHost(self): return self.host
def setHost(self, value): self.host = value
def getPath(self): return self.path
def setPath(self, value): self.path = value
class Option:
'''
Represents an option in the rsync command. Options can either be
short-style options, like -v, or long-style options, like --verbose.
Both option styles can have parameters, like -e 'ssh -p 2234' or
--exclude=pattern.
Options are stored in a dictionary with (key, [list of params]) entries.
This approach is used because an rsync option can appear multiple times in
a single rsync command, e.g. --exclude=pathA --exclude=pathB. In this
example, there would be a single key for the --exclude option and two
parameters to specify the paths.
'''
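    # Example: enable('v') plus enable('exclude', 'logs/') renders (in sorted
    # key order) as "--exclude=logs/ -v"; enabling 'exclude' again with
    # 'tmp/' appends a second --exclude rather than replacing the first.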
def __init__(self):
self.options = dict()
def __iter__(self):
return self.options.__iter__()
def enable(self, option, param=''):
''' Enable or turn on a flag. The flag and optional parameter are
added to the dictionary of options. '''
if option in self.options:
# the option is already in the list,
# so append the parameter if it is unique
if not param in self.options[option]:
self.options[option].append(param)
else:
# the option is new, so create an entry in a list
self.options[option] = [param]
def disable(self, option, param=None):
''' Disable or turn off a flag. This removes the first option whose
flag name matches.'''
if option in self.options:
if param == None:
del self.options[option]
elif param in self.options[option]:
self.options[option].remove(param)
if len(self.options[option]) == 0:
del self.options[option]
def __str__(self):
        result = ''
        for opt in sorted(self.options):
            ostr = ''
            if len(opt) == 1:
                for param in self.options[opt]:
                    ostr = ostr + "-%s %s " % (opt, param)
            else:
                for param in self.options[opt]:
                    if len(param) > 0:
                        ostr = ostr + "--%s=%s " % (opt, param)
                    else:
                        ostr = ostr + "--%s " % opt
            result = result + " " + ostr.strip()
        return result.strip()
def getOptions(self): return self.options
def setOptions(self, value): self.options = value
class Command:
'''
An rsync command, which has the format:
rsync OPTIONS SOURCE DESTINATION
rsync commands can be run forwards or backwards (by swapping destination
and source).
'''
def __init__(self, srcPath=None, destPath=None, options=None):
'''
Initialize a command instance.
'''
# Big lesson learned here. If you specify default parameters to a
# method, they are instantiated only once, when the define is read.
# This has big implications for mutable arguments to __init__ methods,
# because every instance of the class will share the same init
# parameters. This can be a very bad thing if your class holds these
# parameters as instance variables.
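        # For example, "def __init__(self, options=Option())" would evaluate
        # Option() once at definition time, so every Command would share (and
        # mutate) the same Option object; the None-checks below avoid that.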
if srcPath != None:
self.source = srcPath
else:
self.source = Path()
if destPath !=None:
self.destination = destPath
else:
self.destination = Path()
if options != None:
self.options = options
else:
self.options = Option()
self.options.enable('n')
def forward(self):
return "rsync %s %s %s" % (self.options, self.source, self.destination)
def reverse(self):
return "rsync %s %s %s" % (self.options, self.destination, self.source)
def __str__(self):
return self.forward()
def execute(self, reverse=False):
''' Execute the rsync command and display the result. The reverse
option specifies whether this command should be run in forward or
reverse order, with the default being forward.'''
if (not reverse):
print "Executing: " + self.forward()
(status, output) = commands.getstatusoutput(self.forward())
else:
print "Executing: " + self.reverse()
(status, output) = commands.getstatusoutput(self.reverse())
print output
def getSource(self): return self.source
def setSource(self, value): self.source = value
def getDestination(self): return self.destination
def setDestination(self, value): self.destination = value
def getOptions(self): return self.options
def setOptions(self, value): self.options = value
def getDescription(self):
string = ''
if self.getSource().getHost() != "":
string = string + self.getSource().getHost() + ":"
string = string + self.getSource().getPath() + " --> "
if self.getDestination().getHost() != "":
string = string + self.getDestination().getHost() + ":"
string = string + self.getDestination().getPath()
return string
class Profile:
'''
A collection of rsync commands.
'''
def __init__(self, name="NewProfile"):
self.name = name
self.commands = list()
self.presync = ''
self.postsync = ''
def add(self, command):
self.commands.append(command)
def remove(self, index):
if index < len(self.commands):
self.commands.remove(self.commands[index])
def get(self, index):
if index < len(self.commands):
return self.commands[index]
def __iter__(self):
return self.commands.__iter__()
def execute(self, reverse=False):
for command in self.commands:
            command.execute(reverse)
def getName(self): return self.name
def setName(self, value): self.name = value
def getCommands(self): return self.commands
# prebackup and postbackup were added later, so the get methods use getattr to
# avoid problems with old pickled Profile objects.
def getPreSync(self): return getattr(self, 'presync', '')
def getPostSync(self): return getattr(self, 'postsync', '')
def setPreSync(self, value): self.presync = value
def setPostSync(self, value): self.postsync = value
if __name__ == "__main__":
path1 = Path()
path2 = Path()
path1.setPath("path1")
path2.setPath("path2")
print path1, path2
command1 = Command()
command2 = Command()
command1.getSource().setPath("command1")
command1.getOptions().enable("c1")
print command1, command2
command2.getSource().setPath("command2")
command2.getOptions().enable("c2")
print command1, command2
source = Path("~/deleteme/source/")
dest = Path("~/deleteme/dest/")
flags = Option()
flags.enable("a")
flags.enable("v")
flags.enable("u")
flags.enable("n")
flags.enable("h")
flags.enable("exclude", "notyou")
command = Command(source, dest, flags)
profile = Profile("work")
profile.add(command)
for cmd in profile:
print cmd.getDescription()
print str(cmd)
# profile.execute()
# profile.execute(True)
|
JoelESvensson/json-schema-hack | refs/heads/master | docs/conf.py | 74 | # -*- coding: utf-8 -*-
#
# JsonSchema documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 10 15:34:44 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JsonSchema'
copyright = u'2011, Justin Rainbow, Bruno Prieto Reis'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JsonSchemadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'JsonSchema.tex', u'JsonSchema Documentation',
u'Justin Rainbow, Bruno Prieto Reis', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jsonschema', u'JsonSchema Documentation',
[u'Justin Rainbow, Bruno Prieto Reis'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'JsonSchema', u'JsonSchema Documentation', u'Justin Rainbow, Bruno Prieto Reis',
'JsonSchema', 'One line description of project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
mikelikespie/bazel | refs/heads/master | third_party/py/gflags/tests/gflags_googletest.py | 132 | #!/usr/bin/env python
# Copyright (c) 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Some simple additions to the unittest framework useful for gflags testing."""
import re
import unittest
def Sorted(lst):
"""Equivalent of sorted(), but not dependent on python version."""
sorted_list = lst[:]
sorted_list.sort()
return sorted_list
def MultiLineEqual(expected, actual):
"""Returns True if expected == actual, or returns False and logs."""
if actual == expected:
return True
print "Error: FLAGS.MainModuleHelp() didn't return the expected result."
print "Got:"
print actual
print "[End of got]"
actual_lines = actual.split("\n")
expected_lines = expected.split("\n")
num_actual_lines = len(actual_lines)
num_expected_lines = len(expected_lines)
if num_actual_lines != num_expected_lines:
print "Number of actual lines = %d, expected %d" % (
num_actual_lines, num_expected_lines)
num_to_match = min(num_actual_lines, num_expected_lines)
for i in range(num_to_match):
if actual_lines[i] != expected_lines[i]:
print "One discrepancy: Got:"
print actual_lines[i]
print "Expected:"
print expected_lines[i]
break
else:
# If we got here, found no discrepancy, print first new line.
if num_actual_lines > num_expected_lines:
print "New help line:"
print actual_lines[num_expected_lines]
elif num_expected_lines > num_actual_lines:
print "Missing expected help line:"
print expected_lines[num_actual_lines]
else:
print "Bug in this test -- discrepancy detected but not found."
return False
class TestCase(unittest.TestCase):
def assertListEqual(self, list1, list2):
"""Asserts that, when sorted, list1 and list2 are identical."""
# This exists in python 2.7, but not previous versions. Use the
# built-in version if possible.
if hasattr(unittest.TestCase, "assertListEqual"):
unittest.TestCase.assertListEqual(self, Sorted(list1), Sorted(list2))
else:
self.assertEqual(Sorted(list1), Sorted(list2))
def assertMultiLineEqual(self, expected, actual):
# This exists in python 2.7, but not previous versions. Use the
# built-in version if possible.
if hasattr(unittest.TestCase, "assertMultiLineEqual"):
unittest.TestCase.assertMultiLineEqual(self, expected, actual)
else:
self.assertTrue(MultiLineEqual(expected, actual))
def assertRaisesWithRegexpMatch(self, exception, regexp, fn, *args, **kwargs):
try:
fn(*args, **kwargs)
except exception, why:
self.assertTrue(re.search(regexp, str(why)),
"'%s' does not match '%s'" % (regexp, why))
return
self.fail(exception.__name__ + " not raised")
def main():
unittest.main()
|
foxichu/etherkeeper | refs/heads/master | etherkeeper/organize/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
sbalde/edxplatform | refs/heads/master | lms/djangoapps/instructor/tests/test_spoc_gradebook.py | 29 | """
Tests of the instructor dashboard spoc gradebook
"""
from django.core.urlresolvers import reverse
from nose.plugins.attrib import attr
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory, AdminFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware.tests.factories import StudentModuleFactory
USER_COUNT = 11
@attr('shard_1')
class TestGradebook(SharedModuleStoreTestCase):
"""
Test functionality of the spoc gradebook. Sets up a course with assignments and
students who've scored various scores on these assignments. Base class for further
gradebook tests.
"""
grading_policy = None
@classmethod
def setUpClass(cls):
super(TestGradebook, cls).setUpClass()
# Create a course with the desired grading policy (from our class attribute)
kwargs = {}
if cls.grading_policy is not None:
kwargs['grading_policy'] = cls.grading_policy
cls.course = CourseFactory.create(**kwargs)
# Now give it some content
with cls.store.bulk_operations(cls.course.id, emit_signals=False):
chapter = ItemFactory.create(
parent_location=cls.course.location,
category="sequential",
)
section = ItemFactory.create(
parent_location=chapter.location,
category="sequential",
metadata={'graded': True, 'format': 'Homework'}
)
cls.items = [
ItemFactory.create(
parent_location=section.location,
category="problem",
data=StringResponseXMLFactory().build_xml(answer='foo'),
metadata={'rerandomize': 'always'}
)
for __ in xrange(USER_COUNT - 1)
]
def setUp(self):
super(TestGradebook, self).setUp()
instructor = AdminFactory.create()
self.client.login(username=instructor.username, password='test')
self.users = [UserFactory.create() for _ in xrange(USER_COUNT)]
for user in self.users:
CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
for i, item in enumerate(self.items):
for j, user in enumerate(self.users):
StudentModuleFactory.create(
grade=1 if i < j else 0,
max_grade=1,
student=user,
course_id=self.course.id,
module_state_key=item.location
)
self.response = self.client.get(reverse(
'spoc_gradebook',
args=(self.course.id.to_deprecated_string(),)
))
self.assertEquals(self.response.status_code, 200)
@attr('shard_1')
class TestDefaultGradingPolicy(TestGradebook):
"""
Tests that the grading policy is properly applied for all users in the course
Uses the default policy (50% passing rate)
"""
def test_all_users_listed(self):
for user in self.users:
self.assertIn(user.username, unicode(self.response.content, 'utf-8'))
def test_default_policy(self):
# Default >= 50% passes, so Users 5-10 should be passing for Homework 1 [6]
# One use at the top of the page [1]
self.assertEquals(7, self.response.content.count('grade_Pass'))
# Users 1-5 attempted Homework 1 (and get Fs) [4]
# Users 1-10 attempted any homework (and get Fs) [10]
# Users 4-10 scored enough to not get rounded to 0 for the class (and get Fs) [7]
# One use at top of the page [1]
self.assertEquals(22, self.response.content.count('grade_F'))
# All other grades are None [29 categories * 11 users - 27 non-empty grades = 292]
# One use at the top of the page [1]
self.assertEquals(293, self.response.content.count('grade_None'))
@attr('shard_1')
class TestLetterCutoffPolicy(TestGradebook):
"""
Tests advanced grading policy (with letter grade cutoffs). Includes tests of
UX display (color, etc).
"""
grading_policy = {
"GRADER": [
{
"type": "Homework",
"min_count": 1,
"drop_count": 0,
"short_label": "HW",
"weight": 1
},
],
"GRADE_CUTOFFS": {
'A': .9,
'B': .8,
'C': .7,
'D': .6,
}
}
def test_styles(self):
self.assertIn("grade_A {color:green;}", self.response.content)
self.assertIn("grade_B {color:Chocolate;}", self.response.content)
self.assertIn("grade_C {color:DarkSlateGray;}", self.response.content)
self.assertIn("grade_D {color:DarkSlateGray;}", self.response.content)
def test_assigned_grades(self):
print self.response.content
# Users 9-10 have >= 90% on Homeworks [2]
# Users 9-10 have >= 90% on the class [2]
# One use at the top of the page [1]
self.assertEquals(5, self.response.content.count('grade_A'))
# User 8 has 80 <= Homeworks < 90 [1]
# User 8 has 80 <= class < 90 [1]
# One use at the top of the page [1]
self.assertEquals(3, self.response.content.count('grade_B'))
# User 7 has 70 <= Homeworks < 80 [1]
# User 7 has 70 <= class < 80 [1]
# One use at the top of the page [1]
self.assertEquals(3, self.response.content.count('grade_C'))
        # User 6 has 60 <= Homeworks < 70 [1]
        # User 6 has 60 <= class < 70 [1]
        # One use at the top of the page [1]
        self.assertEquals(3, self.response.content.count('grade_D'))
# Users 1-5 have 60% > grades > 0 on Homeworks [5]
# Users 1-5 have 60% > grades > 0 on the class [5]
# One use at top of the page [1]
self.assertEquals(11, self.response.content.count('grade_F'))
# User 0 has 0 on Homeworks [1]
# User 0 has 0 on the class [1]
# One use at the top of the page [1]
self.assertEquals(3, self.response.content.count('grade_None'))
|
opensvn/test | refs/heads/master | src/study/list2.py | 1 | #!/bin/env python
x = raw_input('Enter some numbers: ')
aList = x
print aList
|
Andr3iC/juriscraper | refs/heads/master | opinions/united_states_backscrapers/state/nyappdiv_1st.py | 2 | # Back Scraper for New York Appellate Divisions 1st Dept.
# CourtID: nyappdiv_1st
# Court Short Name: NY
# Author: Andrei Chelaru
# Reviewer:
# Date: 2015-10-30
from ny import Site as NySite
class Site(NySite):
def __init__(self, *args, **kwargs):
super(Site, self).__init__(*args, **kwargs)
self.court = 'App+Div,+1st+Dept'
self.interval = 30
|
datenbetrieb/odoo | refs/heads/8.0 | addons/l10n_uy/__init__.py | 438 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Openerp.uy <[email protected]>
#    OpenERP localization project for Uruguay
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
|
upsuper/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/third_party/pytest/setup.py | 13 | import os
import sys
import setuptools
import pkg_resources
from setuptools import setup, Command
classifiers = [
'Development Status :: 6 - Mature',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
] + [
('Programming Language :: Python :: %s' % x)
for x in '2 2.7 3 3.4 3.5 3.6'.split()
]
with open('README.rst') as fd:
long_description = fd.read()
def has_environment_marker_support():
"""
Tests that setuptools has support for PEP-426 environment marker support.
The first known release to support it is 0.7 (and the earliest on PyPI seems to be 0.7.2
so we're using that), see: http://pythonhosted.org/setuptools/history.html#id142
References:
* https://wheel.readthedocs.io/en/latest/index.html#defining-conditional-dependencies
* https://www.python.org/dev/peps/pep-0426/#environment-markers
"""
try:
return pkg_resources.parse_version(setuptools.__version__) >= pkg_resources.parse_version('0.7.2')
except Exception as exc:
sys.stderr.write("Could not test setuptool's version: %s\n" % exc)
return False
def main():
extras_require = {}
install_requires = [
'py>=1.5.0',
'six>=1.10.0',
'setuptools',
'attrs>=17.2.0',
]
# if _PYTEST_SETUP_SKIP_PLUGGY_DEP is set, skip installing pluggy;
# used by tox.ini to test with pluggy master
if '_PYTEST_SETUP_SKIP_PLUGGY_DEP' not in os.environ:
install_requires.append('pluggy>=0.5,<0.7')
if has_environment_marker_support():
extras_require[':python_version<"3.0"'] = ['funcsigs']
extras_require[':sys_platform=="win32"'] = ['colorama']
else:
if sys.platform == 'win32':
install_requires.append('colorama')
if sys.version_info < (3, 0):
install_requires.append('funcsigs')
setup(
name='pytest',
description='pytest: simple powerful testing with Python',
long_description=long_description,
use_scm_version={
'write_to': '_pytest/_version.py',
},
url='http://pytest.org',
license='MIT license',
platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
author=(
'Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, '
'Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others'),
entry_points={'console_scripts': [
'pytest=pytest:main', 'py.test=pytest:main']},
classifiers=classifiers,
keywords="test unittest",
cmdclass={'test': PyTest},
# the following should be enabled for release
setup_requires=['setuptools-scm'],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
install_requires=install_requires,
extras_require=extras_require,
packages=['_pytest', '_pytest.assertion', '_pytest._code'],
py_modules=['pytest'],
zip_safe=False,
)
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
PPATH = [x for x in os.environ.get('PYTHONPATH', '').split(':') if x]
PPATH.insert(0, os.getcwd())
os.environ['PYTHONPATH'] = ':'.join(PPATH)
errno = subprocess.call([sys.executable, 'pytest.py', '--ignore=doc'])
raise SystemExit(errno)
if __name__ == '__main__':
main()
|
pchauncey/ansible | refs/heads/devel | lib/ansible/module_utils/aos.py | 28 | #
# Copyright (c) 2017 Apstra Inc, <[email protected]>
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
This module adds shared support for Apstra AOS modules
In order to use this module, include it as part of your module
from ansible.module_utils.aos import (check_aos_version, get_aos_session, find_collection_item,
content_to_dict, do_load_resource)
"""
import json
from distutils.version import LooseVersion
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
try:
from apstra.aosom.session import Session
HAS_AOS_PYEZ = True
except ImportError:
HAS_AOS_PYEZ = False
from ansible.module_utils._text import to_native
def check_aos_version(module, min=False):
"""
Check if the library aos-pyez is present.
If provided, also check if the minimum version requirement is met
"""
if not HAS_AOS_PYEZ:
module.fail_json(msg='aos-pyez is not installed. Please see details '
'here: https://github.com/Apstra/aos-pyez')
elif min:
import apstra.aosom
AOS_PYEZ_VERSION = apstra.aosom.__version__
if not LooseVersion(AOS_PYEZ_VERSION) >= LooseVersion(min):
module.fail_json(msg='aos-pyez >= %s is required for this module' % min)
return True
def get_aos_session(module, auth):
"""
Resume an existing session and return an AOS object.
Args:
auth (dict): An AOS session as obtained by aos_login module blocks::
dict( token=<token>,
server=<ip>,
port=<port>
)
Return:
Aos object
"""
check_aos_version(module)
aos = Session()
aos.session = auth
return aos
def find_collection_item(collection, item_name=False, item_id=False):
"""
Find collection_item based on name or id from a collection object
Both Collection_item and Collection Objects are provided by aos-pyez library
Return
collection_item: object corresponding to the collection type
"""
my_dict = None
if item_name:
my_dict = collection.find(label=item_name)
elif item_id:
my_dict = collection.find(uid=item_id)
if my_dict is None:
return collection['']
else:
return my_dict
def content_to_dict(module, content):
"""
Convert 'content' into a Python Dict based on 'content_format'
"""
    if not HAS_YAML:
        module.fail_json(msg="Python Library Yaml is not present, mandatory to use 'content'")
    content_dict = None
try:
content_dict = yaml.safe_load(content)
if not isinstance(content_dict, dict):
raise Exception()
        # Check if the dict is empty and raise an error if it is
if not content_dict:
raise Exception()
    except Exception:
module.fail_json(msg="Unable to convert 'content' to a dict, please check if valid")
# replace the string with the dict
module.params['content'] = content_dict
return content_dict
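# For example (hypothetical content), content_to_dict(module, "name: my-pool")
# returns {'name': 'my-pool'} and stores it back into module.params['content'].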
def do_load_resource(module, collection, name):
"""
    Create a new object (collection.item) by loading a data structure directly
"""
try:
item = find_collection_item(collection, name, '')
    except Exception:
module.fail_json(msg="An error occurred while running 'find_collection_item'")
if item.exists:
module.exit_json(changed=False, name=item.name, id=item.id, value=item.value)
# If not in check mode, apply the changes
if not module.check_mode:
try:
item.datum = module.params['content']
item.write()
except Exception as e:
module.fail_json(msg="Unable to write item content : %r" % to_native(e))
module.exit_json(changed=True, name=item.name, id=item.id, value=item.value)
|
beakman/droidlab | refs/heads/master | config/urls.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
from rest_framework.authtoken import views
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),
# Django Admin, use {% url 'admin:index' %}
url(settings.ADMIN_URL, admin.site.urls),
# User management
url(r'^users/', include('droidlab.users.urls', namespace='users')),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
url(r'^api/', include('droidlab.api.urls', namespace='api')),
# Url to obtain Auth Token:
# i.e: http POST 127.0.0.1:8000/api-token-auth/ username='admin' password='whatever'
url(r'^api-token-auth/', views.obtain_auth_token),
# REST Auth urls
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration/', include('rest_auth.registration.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development; just visit
    # these URLs in a browser to see what the error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
chris-chambers/llvm | refs/heads/sbbm | test/CodeGen/SystemZ/Large/branch-range-12.py | 13 | # Test 64-bit COMPARE LOGICAL IMMEDIATE AND BRANCH in cases where the sheer
# number of instructions causes some branches to be out of range.
# RUN: python %s | llc -mtriple=s390x-linux-gnu | FileCheck %s
# Construct:
#
# before0:
# conditional branch to after0
# ...
# beforeN:
# conditional branch to after0
# main:
# 0xffb4 bytes, from MVIY instructions
# conditional branch to main
# after0:
# ...
# conditional branch to main
# afterN:
#
# Each conditional branch sequence occupies 18 bytes if it uses a short
# branch and 24 if it uses a long one. The ones before "main:" have to
# take the branch length into account, which is 6 for short branches,
# so the final (0x4c - 6) / 18 == 3 blocks can use short branches.
# The ones after "main:" do not, so the first 0x4c / 18 == 4 blocks
# can use short branches. The conservative algorithm we use makes
# one of the forward branches unnecessarily long, as noted in the
# check output below.
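# (For reference, 0x4c is presumably the slack left under the 0x10000-byte
# short-branch range: 0x10000 - main_size = 0x10000 - 0xffb4 = 0x4c.)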
#
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 50
# CHECK: jgl [[LABEL:\.L[^ ]*]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 51
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 52
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 53
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 54
# CHECK: jgl [[LABEL]]
# ...as mentioned above, the next one could be a CLGIJL instead...
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 55
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 56, [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 57, [[LABEL]]
# ...main goes here...
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 100, [[LABEL:\.L[^ ]*]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 101, [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 102, [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgijl [[REG]], 103, [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 104
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 105
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 106
# CHECK: jgl [[LABEL]]
# CHECK: lg [[REG:%r[0-5]]], 0(%r3)
# CHECK: sg [[REG]], 0(%r4)
# CHECK: clgfi [[REG]], 107
# CHECK: jgl [[LABEL]]
branch_blocks = 8
main_size = 0xffb4
print 'define void @f1(i8 *%base, i64 *%stopa, i64 *%stopb) {'
print 'entry:'
print ' br label %before0'
print ''
for i in xrange(branch_blocks):
next = 'before%d' % (i + 1) if i + 1 < branch_blocks else 'main'
print 'before%d:' % i
print ' %%bcur%da = load i64 *%%stopa' % i
print ' %%bcur%db = load i64 *%%stopb' % i
print ' %%bsub%d = sub i64 %%bcur%da, %%bcur%db' % (i, i, i)
print ' %%btest%d = icmp ult i64 %%bsub%d, %d' % (i, i, i + 50)
print ' br i1 %%btest%d, label %%after0, label %%%s' % (i, next)
print ''
print '%s:' % next
a, b = 1, 1
for i in xrange(0, main_size, 6):
a, b = b, a + b
offset = 4096 + b % 500000
value = a % 256
print ' %%ptr%d = getelementptr i8 *%%base, i64 %d' % (i, offset)
print ' store volatile i8 %d, i8 *%%ptr%d' % (value, i)
for i in xrange(branch_blocks):
print ' %%acur%da = load i64 *%%stopa' % i
print ' %%acur%db = load i64 *%%stopb' % i
print ' %%asub%d = sub i64 %%acur%da, %%acur%db' % (i, i, i)
print ' %%atest%d = icmp ult i64 %%asub%d, %d' % (i, i, i + 100)
print ' br i1 %%atest%d, label %%main, label %%after%d' % (i, i)
print ''
print 'after%d:' % i
print ' ret void'
print '}'
|
arannasousa/pagseguro_xml | refs/heads/master | pagseguro_xml/pagamento/v2/classes/erros.py | 2 | # coding=utf-8
# ---------------------------------------------------------------
# Developer: Arannã Sousa Santos
# Month: 12
# Year: 2015
# Project: pagseguro_xml
# e-mail: [email protected]
# ---------------------------------------------------------------
from __future__ import division, print_function, unicode_literals
from ....core.base_classes import ABERTURA, TagCaracter, XMLAPI
from ....core import CONST
class Error(XMLAPI):
def __init__(self):
super(Error, self).__init__()
self.code = TagCaracter(nome=u'code', raiz=u'//error', opcoes=CONST.CODE.opcoes.keys())
self.message = TagCaracter(nome=u'message', raiz=u'//error')
def get_xml(self):
xml = XMLAPI.get_xml(self)
xml += u'<error>'
xml += self.code.xml
xml += self.message.xml
xml += u'</error>'
return xml
def set_xml(self, arquivo):
if self._le_xml(arquivo):
self.code.xml = arquivo
self.message.xml = arquivo
xml = property(get_xml, set_xml)
def get_alertas(self):
alertas = []
alertas.extend(self.code.alertas)
alertas.extend(self.message.alertas)
return alertas
alertas = property(get_alertas)
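# A minimal usage sketch (the error code/message values are illustrative only):
#
#   e = Error()
#   e.xml = u'<error><code>11004</code><message>Currency is required.</message></error>'
#   print(e.alertas)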
class ClassePagamentoErros(XMLAPI):
def __init__(self):
super(ClassePagamentoErros, self).__init__()
self.errors = []
def get_xml(self):
xml = XMLAPI.get_xml(self)
xml += ABERTURA
xml += u'<errors>'
for erro in self.errors:
xml += erro.xml
xml += u'</errors>'
return xml
def set_xml(self, arquivo):
if self._le_xml(arquivo):
self.errors = self.le_grupo(u'//errors/error', Error)
xml = property(get_xml, set_xml)
def get_alertas(self):
alertas = []
for error in self.errors:
alertas.extend(error.alertas)
return alertas
alertas = property(get_alertas) |
turbulencia/tlab | refs/heads/master | scripts/python/NcReduceLevels.py | 2 | import netCDF4 as nc
import numpy as np
import sys
# getting parameters from the command line
if ( len(sys.argv) <= 1 ):
print("Usage: python $0 [flow,scal,tower] list-of-files.")
quit()
filetype = sys.argv[1]
setoffiles = sorted(sys.argv[2:])
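# e.g. (hypothetical file names): python NcReduceLevels.py tower tower_t1.nc tower_t2.nc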
# processing data
if ( filetype == 'tower' ):
# setoflevels = np.arange(0,100,10,dtype=np.int16)
setoflevels = np.array([0, 147, 276, 356, 419, 479, 537, 595, 652],dtype=np.int16)
setofvars = 'u v w p s'.split(' ')
# elif ( filetype == 'scal' ):
# setoflevels = np.arange(0,100,10,dtype=np.int16)
# elif ( filetype == 'flow' ):
# setoflevels = np.arange(0,100,10,dtype=np.int16)
else:
print("Error: file type not supported.")
quit()
for file in setoffiles:
print("Processing file %s ..." % file)
file_org = nc.Dataset(file, 'r')
file_dst = nc.Dataset(file.replace('.nc','_r.nc'), 'w')
# read independent variables from origin nc-file
x_org = file_org.variables['x'][:]
y_org = file_org.variables['y'][setoflevels]
z_org = file_org.variables['z'][:]
t_org = file_org.variables['t'][:]
print(y_org)
# create dimensions for destiny nc-file
file_dst.createDimension('x',len(x_org))
file_dst.createDimension('y',len(y_org))
file_dst.createDimension('z',len(z_org))
file_dst.createDimension('t',len(t_org))
# create and write independent variables in destiny nc-file
x_dst = file_dst.createVariable('x', 'f4', ('x',))
y_dst = file_dst.createVariable('y', 'f4', ('y',))
z_dst = file_dst.createVariable('z', 'f4', ('z',))
t_dst = file_dst.createVariable('t', 'f4', ('t',))
x_dst[:] = x_org[:]
y_dst[:] = y_org[:]
z_dst[:] = z_org[:]
t_dst[:] = t_org[:]
# read and write iteration numbers
it_org = file_org.variables['it'][:]
it_dst = file_dst.createVariable('it', 'i4', ('t',))
it_dst[:] = it_org[:]
# read and write other dependent variables
for var in setofvars:
var_org = file_org.variables[var][:,:,:,setoflevels]
var_dst = file_dst.createVariable(var, 'f4', ('t','z','x','y',))
var_dst[:] = var_org[:]
file_dst.close()
|
frascoweb/frasco | refs/heads/master | frasco/users/otp.py | 1 | from flask import current_app
from frasco.ext import get_extension_state
from frasco.mail import send_mail
import pyotp
from .password import hash_password, generate_random_password
def generate_otp_code(user):
user.otp_code = pyotp.random_base32()
return generate_otpauth_url(user)
def enable_2fa(user, code):
if not verify_2fa(user, code):
return False
user.two_factor_auth_enabled = True
recovery_code = generate_otp_recovery_code(user)
send_mail(user.email, 'users/2fa_enabled', recovery_code=recovery_code)
return recovery_code
def disable_2fa(user):
if not user.two_factor_auth_enabled:
return
user.two_factor_auth_enabled = False
user.otp_code = None
user.otp_recovery_code = None
send_mail(user.email, 'users/2fa_disabled')
def verify_2fa(user, code):
if not user.otp_code:
return False
totp = pyotp.TOTP(user.otp_code)
return totp.verify(code)
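# Minimal sketch of the enrollment/verification flow (hypothetical `user` object
# exposing the attributes used above):
#
#   generate_otp_code(user)                  # stores a fresh base32 secret on the user
#   code = pyotp.TOTP(user.otp_code).now()   # current time-based code
#   assert verify_2fa(user, code)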
def generate_otp_recovery_code(user):
recovery_code = generate_random_password()
user.otp_recovery_code = hash_password(recovery_code)
return recovery_code
def generate_otpauth_url(user):
return pyotp.totp.TOTP(user.otp_code).provisioning_uri(user.email,
issuer_name=get_extension_state('frasco_users').options['2fa_issuer_name'] or current_app.config.get('TITLE'))
|
ArcherSys/ArcherSys | refs/heads/master | skulpt/test/run/t174.py | 1 | longvalue = 9999999999999L
print "%3ld" % 42
print "%d" % 42
print "%d" % 42.0
print "%d" % longvalue
print "%07.2f" % 42
print "%07.2F" % 42
print "%(foo)s" % { 'foo': 'bar' }
#print "%((foo))s" % { '(foo)': 'bar' }
print "%sx" % (103*'a')
|
postfix/androguard | refs/heads/master | demos/dad_print.py | 61 | #!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <[email protected]>
# All Rights Reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys
sys.path.append('./')
from androguard.core.bytecodes import apk, dvm
from androguard.core.analysis.analysis import uVMAnalysis
from androguard.decompiler.dad.decompile import DvMethod
from androguard.decompiler.dad.instruction import Constant, BinaryCompExpression
class PrintVisitor(object):
def __init__(self, graph):
self.graph = graph
self.visited_nodes = set()
self.loop_follow = [None]
self.latch_node = [None]
self.if_follow = [None]
self.switch_follow = [None]
self.next_case = None
def visit_ins(self, ins):
return ins.visit(self)
def visit_node(self, node):
if node in (self.if_follow[-1], self.switch_follow[-1],
self.loop_follow[-1], self.latch_node[-1]):
return
if node in self.visited_nodes:
return
self.visited_nodes.add(node)
node.visit(self)
def visit_loop_node(self, loop):
print '- Loop node', loop.num
follow = loop.get_loop_follow()
if follow is None and not loop.looptype.endless():
exit('Loop has no follow !', 'error')
if loop.looptype.pretest():
if loop.true is follow:
loop.neg()
loop.true, loop.false = loop.false, loop.true
cnd = loop.visit_cond(self)
print 'while(%s) {' % cnd
elif loop.looptype.posttest():
print 'do {'
self.latch_node.append(loop.latch)
elif loop.looptype.endless():
print 'while(true) {'
self.loop_follow.append(follow)
if loop.looptype.pretest():
self.visit_node(loop.true)
else:
self.visit_node(loop.cond)
self.loop_follow.pop()
if loop.looptype.pretest():
print '}'
elif loop.looptype.posttest():
print '} while(',
self.latch_node.pop()
loop.latch.visit_cond(self)
print ')'
else:
self.visit_node(loop.latch)
if follow is not None:
self.visit_node(follow)
def visit_cond_node(self, cond):
print '- Cond node', cond.num
follow = cond.get_if_follow()
if cond.false is self.loop_follow[-1]:
cond.neg()
cond.true, cond.false = cond.false, cond.true
cond.visit_cond(self)
self.visit_node(cond.false)
elif follow is not None:
is_else = not (follow in (cond.true, cond.false))
if (cond.true in (follow, self.next_case)
or cond.num > cond.true.num):
cond.neg()
cond.true, cond.false = cond.false, cond.true
self.if_follow.append(follow)
if not cond.true in self.visited_nodes:
cnd = cond.visit_cond(self)
print 'if (%s) {' % cnd
self.visit_node(cond.true)
if is_else and not cond.false in self.visited_nodes:
print '} else {'
self.visit_node(cond.false)
print '}'
self.if_follow.pop()
self.visit_node(follow)
else:
cond.visit_cond(self)
self.visit_node(cond.true)
self.visit_node(cond.false)
def visit_short_circuit_condition(self, nnot, aand, cond1, cond2):
if nnot:
cond1.neg()
cond1.visit_cond(self)
cond2.visit_cond(self)
def visit_switch_node(self, switch):
lins = switch.get_ins()
for ins in lins[:-1]:
self.visit_ins(ins)
switch_ins = switch.get_ins()[-1]
self.visit_ins(switch_ins)
follow = switch.switch_follow
cases = switch.cases
self.switch_follow.append(follow)
default = switch.default
for i, node in enumerate(cases):
if node in self.visited_nodes:
continue
for case in switch.node_to_case[node]:
pass
if i + 1 < len(cases):
self.next_case = cases[i + 1]
else:
self.next_case = None
if node is default:
default = None
self.visit_node(node)
if default not in (None, follow):
self.visit_node(default)
self.switch_follow.pop()
self.visit_node(follow)
def visit_statement_node(self, stmt):
print '- Statement node', stmt.num
sucs = self.graph.sucs(stmt)
for ins in stmt.get_ins():
self.visit_ins(ins)
if len(sucs) == 0:
return
follow = sucs[0]
self.visit_node(follow)
def visit_return_node(self, ret):
print '- Return node', ret.num
for ins in ret.get_ins():
self.visit_ins(ins)
def visit_throw_node(self, throw):
for ins in throw.get_ins():
self.visit_ins(ins)
def visit_constant(self, cst):
return cst
def visit_base_class(self, cls):
return cls
def visit_variable(self, var):
return 'v%s' % var
def visit_param(self, param):
return 'p%s' % param
def visit_this(self):
return 'this'
def visit_assign(self, lhs, rhs):
if lhs is None:
rhs.visit(self)
return
l = lhs.visit(self)
r = rhs.visit(self)
print '%s = %s;' % (l, r)
def visit_move_result(self, lhs, rhs):
l = lhs.visit(self)
r = rhs.visit(self)
print '%s = %s;' % (l, r)
def visit_move(self, lhs, rhs):
if lhs is rhs:
return
l = lhs.visit(self)
r = rhs.visit(self)
print '%s = %s;' % (l, r)
def visit_astore(self, array, index, rhs):
arr = array.visit(self)
if isinstance(index, Constant):
idx = index.visit(self, 'I')
else:
idx = index.visit(self)
r = rhs.visit(self)
print '%s[%s] = %s' % (arr, idx, r)
def visit_put_static(self, cls, name, rhs):
r = rhs.visit(self)
return '%s.%s = %s' % (cls, name, r)
def visit_put_instance(self, lhs, name, rhs):
l = lhs.visit(self)
r = rhs.visit(self)
return '%s.%s = %s' % (l, name, r)
def visit_new(self, atype):
pass
def visit_invoke(self, name, base, args):
base.visit(self)
for arg in args:
arg.visit(self)
def visit_return_void(self):
print 'return;'
def visit_return(self, arg):
a = arg.visit(self)
print 'return %s;' % a
def visit_nop(self):
pass
def visit_switch(self, arg):
arg.visit(self)
def visit_check_cast(self, arg, atype):
arg.visit(self)
def visit_aload(self, array, index):
arr = array.visit(self)
idx = index.visit(self)
return '%s[%s]' % (arr, idx)
def visit_alength(self, array):
res = array.visit(self)
return '%s.length' % res
def visit_new_array(self, atype, size):
size.visit(self)
def visit_filled_new_array(self, atype, size, args):
atype.visit(self)
size.visit(self)
for arg in args:
arg.visit(self)
def visit_fill_array(self, array, value):
array.visit(self)
def visit_monitor_enter(self, ref):
ref.visit(self)
def visit_monitor_exit(self, ref):
pass
def visit_throw(self, ref):
ref.visit(self)
def visit_binary_expression(self, op, arg1, arg2):
val1 = arg1.visit(self)
val2 = arg2.visit(self)
return '%s %s %s' % (val1, op, val2)
def visit_unary_expression(self, op, arg):
arg.visit(self)
def visit_cast(self, op, arg):
a = arg.visit(self)
return '(%s %s)' % (op, a)
def visit_cond_expression(self, op, arg1, arg2):
val1 = arg1.visit(self)
val2 = arg2.visit(self)
return '%s %s %s' % (val1, op, val2)
def visit_condz_expression(self, op, arg):
if isinstance(arg, BinaryCompExpression):
arg.op = op
arg.visit(self)
else:
arg.visit(self)
def visit_get_instance(self, arg, name):
arg.visit(self)
def visit_get_static(self, cls, name):
return '%s.%s' % (cls, name)
TEST = '../DroidDream/magicspiral.apk'
vm = dvm.DalvikVMFormat(apk.APK(TEST).get_dex())
vma = uVMAnalysis(vm)
method = vm.get_method('crypt')[0]
method.show()
amethod = vma.get_method(method)
dvmethod = DvMethod(amethod)
dvmethod.process() # build IR Form / control flow...
graph = dvmethod.graph
print 'Entry block : %s\n' % graph.get_entry()
for block in graph: # graph.get_rpo() to iterate in reverse post order
print 'Block : %s' % block
for ins in block.get_ins():
print ' - %s' % ins
print
visitor = PrintVisitor(graph)
graph.get_entry().visit(visitor)
|
craynot/django | refs/heads/master | tests/shortcuts/tests.py | 132 | from django.test import SimpleTestCase, override_settings
from django.test.utils import require_jinja2
@override_settings(
ROOT_URLCONF='shortcuts.urls',
)
class ShortcutTests(SimpleTestCase):
def test_render_to_response(self):
response = self.client.get('/render_to_response/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
def test_render_to_response_with_multiple_templates(self):
response = self.client.get('/render_to_response/multiple_templates/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
def test_render_to_response_with_content_type(self):
response = self.client.get('/render_to_response/content_type/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
self.assertEqual(response['Content-Type'], 'application/x-rendertest')
def test_render_to_response_with_status(self):
response = self.client.get('/render_to_response/status/')
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b'FOO.BAR..\n')
@require_jinja2
def test_render_to_response_with_using(self):
response = self.client.get('/render_to_response/using/')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render_to_response/using/?using=django')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render_to_response/using/?using=jinja2')
self.assertEqual(response.content, b'Jinja2\n')
def test_render(self):
response = self.client.get('/render/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/\n')
self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
self.assertFalse(hasattr(response.context.request, 'current_app'))
def test_render_with_multiple_templates(self):
response = self.client.get('/render/multiple_templates/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/multiple_templates/\n')
def test_render_with_content_type(self):
response = self.client.get('/render/content_type/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/content_type/\n')
self.assertEqual(response['Content-Type'], 'application/x-rendertest')
def test_render_with_status(self):
response = self.client.get('/render/status/')
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b'FOO.BAR../render/status/\n')
@require_jinja2
def test_render_with_using(self):
response = self.client.get('/render/using/')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render/using/?using=django')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render/using/?using=jinja2')
self.assertEqual(response.content, b'Jinja2\n')
|
semonte/intellij-community | refs/heads/master | python/testData/inspections/PyUnresolvedReferencesInspection/listIndexedByUnknownType.py | 30 | def f(i):
xs = []
xs[i].items()
def g(index):
x = [][index]
x['foo']
|
minhphung171093/GreenERP | refs/heads/master | openerp/addons/website_hr_recruitment/models/__init__.py | 44 | import hr_job
import hr_recruitment_source
import hr_applicant
|
QianBIG/odoo | refs/heads/8.0 | addons/document/__openerp__.py | 260 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Document Management System',
'version': '2.1',
'category': 'Knowledge Management',
'description': """
This is a complete document management system.
==============================================
* User Authentication
    * Document Indexation: .pptx and .docx files are not supported on the Windows platform.
* Dashboard for Document that includes:
* New Files (list)
* Files by Resource Type (graph)
* Files by Partner (graph)
* Files Size by Month (graph)
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com',
'depends': ['knowledge', 'mail'],
'data': [
'security/document_security.xml',
'document_view.xml',
'document_data.xml',
'wizard/document_configuration_view.xml',
'security/ir.model.access.csv',
'report/document_report_view.xml',
'views/document.xml',
],
'demo': [ 'document_demo.xml' ],
'test': ['test/document_test2.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kartikdhar/djangotest | refs/heads/master | virt1/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py | 915 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import re
from . import _base
from .. import ihatexml
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation, fullTree=False):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class Element(_base.Node):
def __init__(self, name, namespace=None):
self._name = name
self._namespace = namespace
self._element = ElementTree.Element(self._getETreeTag(name,
namespace))
if namespace is None:
self.nameTuple = namespaces["html"], self._name
else:
self.nameTuple = self._namespace, self._name
self.parent = None
self._childNodes = []
self._flags = []
def _getETreeTag(self, name, namespace):
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
return etree_tag
def _setName(self, name):
self._name = name
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getName(self):
return self._name
name = property(_getName, _setName)
def _setNamespace(self, namespace):
self._namespace = namespace
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getNamespace(self):
return self._namespace
namespace = property(_getNamespace, _setNamespace)
def _getAttributes(self):
return self._element.attrib
def _setAttributes(self, attributes):
# Delete existing attributes first
# XXX - there may be a better way to do this...
for key in list(self._element.attrib.keys()):
del self._element.attrib[key]
for key, value in attributes.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], key[1])
else:
name = key
self._element.set(name, value)
attributes = property(_getAttributes, _setAttributes)
def _getChildNodes(self):
return self._childNodes
def _setChildNodes(self, value):
del self._element[:]
self._childNodes = []
for element in value:
self.insertChild(element)
childNodes = property(_getChildNodes, _setChildNodes)
def hasContent(self):
"""Return true if the node has children or text"""
return bool(self._element.text or len(self._element))
def appendChild(self, node):
self._childNodes.append(node)
self._element.append(node._element)
node.parent = self
def insertBefore(self, node, refNode):
index = list(self._element).index(refNode._element)
self._element.insert(index, node._element)
node.parent = self
def removeChild(self, node):
self._element.remove(node._element)
node.parent = None
def insertText(self, data, insertBefore=None):
if not(len(self._element)):
if not self._element.text:
self._element.text = ""
self._element.text += data
elif insertBefore is None:
# Insert the text as the tail of the last child element
if not self._element[-1].tail:
self._element[-1].tail = ""
self._element[-1].tail += data
else:
# Insert the text before the specified node
children = list(self._element)
index = children.index(insertBefore._element)
if index > 0:
if not self._element[index - 1].tail:
self._element[index - 1].tail = ""
self._element[index - 1].tail += data
else:
if not self._element.text:
self._element.text = ""
self._element.text += data
def cloneNode(self):
element = type(self)(self.name, self.namespace)
for name, value in self.attributes.items():
element.attributes[name] = value
return element
def reparentChildren(self, newParent):
if newParent.childNodes:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ""
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ""
_base.Node.reparentChildren(self, newParent)
class Comment(Element):
def __init__(self, data):
# Use the superclass constructor to set all properties on the
# wrapper element
self._element = ElementTree.Comment(data)
self.parent = None
self._childNodes = []
self._flags = []
def _getData(self):
return self._element.text
def _setData(self, value):
self._element.text = value
data = property(_getData, _setData)
class DocumentType(Element):
def __init__(self, name, publicId, systemId):
Element.__init__(self, "<!DOCTYPE>")
self._element.text = name
self.publicId = publicId
self.systemId = systemId
def _getPublicId(self):
return self._element.get("publicId", "")
def _setPublicId(self, value):
if value is not None:
self._element.set("publicId", value)
publicId = property(_getPublicId, _setPublicId)
def _getSystemId(self):
return self._element.get("systemId", "")
def _setSystemId(self, value):
if value is not None:
self._element.set("systemId", value)
systemId = property(_getSystemId, _setSystemId)
class Document(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_ROOT")
class DocumentFragment(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_FRAGMENT")
def testSerializer(element):
rv = []
def serializeElement(element, indent=0):
if not(hasattr(element, "tag")):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
rv.append("#document")
if element.text is not None:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
elif element.tag == ElementTreeCommentType:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
else:
assert isinstance(element.tag, text_type), \
"Expected unicode, got %s, %s" % (type(element.tag), element.tag)
nsmatch = tag_regexp.match(element.tag)
if nsmatch is None:
name = element.tag
else:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
name = "%s %s" % (prefix, name)
rv.append("|%s<%s>" % (' ' * indent, name))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = name
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
filter = ihatexml.InfosetFilter()
def serializeElement(element):
if isinstance(element, ElementTree.ElementTree):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
if element.text is not None:
rv.append(element.text)
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
for child in element:
serializeElement(child)
elif element.tag == ElementTreeCommentType:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (filter.fromXmlName(element.tag),))
else:
attr = " ".join(["%s=\"%s\"" % (
filter.fromXmlName(name), value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if element.tail:
rv.append(element.tail)
serializeElement(element)
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = Element
commentClass = Comment
fragmentClass = DocumentFragment
implementation = ElementTreeImplementation
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._element
else:
if self.defaultNamespace is not None:
return self.document._element.find(
"{%s}html" % self.defaultNamespace)
else:
return self.document._element.find("html")
def getFragment(self):
return _base.TreeBuilder.getFragment(self)._element
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
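# A hypothetical usage sketch: build the tree-builder module on top of the
# stdlib ElementTree implementation.
#
#   import xml.etree.ElementTree as ET
#   TreeBuilder = getETreeModule(ET).TreeBuilder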
|
stevemarple/Arduino-org | refs/heads/ide-org-1.6.1.x | arduino-core/src/processing/app/i18n/python/requests/packages/urllib3/poolmanager.py | 168 | # urllib3/poolmanager.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import connection_from_url, port_by_scheme
from .request import RequestMethods
from .util import parse_url
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
pool_classes_by_scheme = {
'http': HTTPConnectionPool,
'https': HTTPSConnectionPool,
}
log = logging.getLogger(__name__)
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example: ::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
>>> r = manager.request('GET', 'http://google.com/mail')
>>> r = manager.request('GET', 'http://yahoo.com/')
>>> len(manager.pools)
2
"""
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
def _new_pool(self, scheme, host, port):
"""
Create a new :class:`ConnectionPool` based on host, port and scheme.
This method is used to actually create the connection pools handed out
by :meth:`connection_from_url` and companion methods. It is intended
to be overridden for customization.
"""
pool_cls = pool_classes_by_scheme[scheme]
return pool_cls(host, port, **self.connection_pool_kw)
def clear(self):
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(self, host, port=None, scheme='http'):
"""
Get a :class:`ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``.
"""
scheme = scheme or 'http'
port = port or port_by_scheme.get(scheme, 80)
pool_key = (scheme, host, port)
# If the scheme, host, or port doesn't match existing open connections,
# open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
pool = self._new_pool(scheme, host, port)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url` but
doesn't pass any additional parameters to the
:class:`urllib3.connectionpool.ConnectionPool` constructor.
Additional parameters are taken from the :class:`.PoolManager`
constructor.
"""
u = parse_url(url)
return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw['assert_same_host'] = False
kw['redirect'] = False
if 'headers' not in kw:
kw['headers'] = self.headers
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
if response.status == 303:
method = 'GET'
log.info("Redirecting %s -> %s" % (url, redirect_location))
kw['retries'] = kw.get('retries', 3) - 1 # Persist retries countdown
return self.urlopen(method, redirect_location, **kw)
class ProxyManager(RequestMethods):
"""
Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method
will make requests to any url through the defined proxy. The ProxyManager
class will automatically set the 'Host' header if it is not provided.
"""
def __init__(self, proxy_pool):
self.proxy_pool = proxy_pool
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
headers_ = {'Accept': '*/*'}
host = parse_url(url).host
if host:
headers_['Host'] = host
if headers:
headers_.update(headers)
return headers_
def urlopen(self, method, url, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
kw['assert_same_host'] = False
kw['headers'] = self._set_proxy_headers(url, headers=kw.get('headers'))
return self.proxy_pool.urlopen(method, url, **kw)
def proxy_from_url(url, **pool_kw):
proxy_pool = connection_from_url(url, **pool_kw)
return ProxyManager(proxy_pool)
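# A minimal usage sketch (assumes a hypothetical HTTP proxy on localhost:3128):
#
#   proxy = proxy_from_url('http://localhost:3128/')
#   r = proxy.urlopen('GET', 'http://example.com/')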
|
sridevikoushik31/nova | refs/heads/port_id_in_vif_on_devide | nova/tests/conductor/test_conductor.py | 1 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for the conductor service."""
import mox
from nova.api.ec2 import ec2utils
from nova.compute import flavors
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import conductor
from nova.conductor import api as conductor_api
from nova.conductor import manager as conductor_manager
from nova.conductor import rpcapi as conductor_rpcapi
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception as exc
from nova import notifications
from nova.openstack.common import jsonutils
from nova.openstack.common.rpc import common as rpc_common
from nova.openstack.common import timeutils
from nova import quota
from nova import test
FAKE_IMAGE_REF = 'fake-image-ref'
class FakeContext(context.RequestContext):
def elevated(self):
"""Return a consistent elevated context so we can detect it."""
if not hasattr(self, '_elevated'):
self._elevated = super(FakeContext, self).elevated()
return self._elevated
class _BaseTestCase(object):
def setUp(self):
super(_BaseTestCase, self).setUp()
self.db = None
self.user_id = 'fake'
self.project_id = 'fake'
self.context = FakeContext(self.user_id, self.project_id)
def stub_out_client_exceptions(self):
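        # Replace the RPC layer's client-exception wrapper with a passthrough
        # so that exceptions raised by stubbed-out methods propagate unwrapped.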
def passthru(exceptions, func, *args, **kwargs):
return func(*args, **kwargs)
self.stubs.Set(rpc_common, 'catch_client_exception', passthru)
def _create_fake_instance(self, params=None, type_name='m1.tiny'):
if not params:
params = {}
inst = {}
inst['vm_state'] = vm_states.ACTIVE
inst['image_ref'] = FAKE_IMAGE_REF
inst['reservation_id'] = 'r-fakeres'
inst['launch_time'] = '10'
inst['user_id'] = self.user_id
inst['project_id'] = self.project_id
inst['host'] = 'fake_host'
type_id = flavors.get_instance_type_by_name(type_name)['id']
inst['instance_type_id'] = type_id
inst['ami_launch_index'] = 0
inst['memory_mb'] = 0
inst['vcpus'] = 0
inst['root_gb'] = 0
inst['ephemeral_gb'] = 0
inst['architecture'] = 'x86_64'
inst['os_type'] = 'Linux'
inst['availability_zone'] = 'fake-az'
inst.update(params)
return db.instance_create(self.context, inst)
def _do_update(self, instance_uuid, **updates):
return self.conductor.instance_update(self.context, instance_uuid,
updates)
def test_instance_update(self):
instance = self._create_fake_instance()
new_inst = self._do_update(instance['uuid'],
vm_state=vm_states.STOPPED)
instance = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(instance['vm_state'], vm_states.STOPPED)
self.assertEqual(new_inst['vm_state'], instance['vm_state'])
def test_action_event_start(self):
self.mox.StubOutWithMock(db, 'action_event_start')
db.action_event_start(self.context, mox.IgnoreArg())
self.mox.ReplayAll()
self.conductor.action_event_start(self.context, {})
def test_action_event_finish(self):
self.mox.StubOutWithMock(db, 'action_event_finish')
db.action_event_finish(self.context, mox.IgnoreArg())
self.mox.ReplayAll()
self.conductor.action_event_finish(self.context, {})
def test_instance_update_invalid_key(self):
# NOTE(danms): the real DB API call ignores invalid keys
        if self.db is None:
self.stub_out_client_exceptions()
self.assertRaises(KeyError,
self._do_update, 'any-uuid', foobar=1)
def test_migration_get(self):
migration = db.migration_create(self.context.elevated(),
{'instance_uuid': 'fake-uuid',
'status': 'migrating'})
self.assertEqual(jsonutils.to_primitive(migration),
self.conductor.migration_get(self.context,
migration['id']))
def test_migration_get_unconfirmed_by_dest_compute(self):
self.mox.StubOutWithMock(db,
'migration_get_unconfirmed_by_dest_compute')
db.migration_get_unconfirmed_by_dest_compute(self.context,
'fake-window',
'fake-host')
self.mox.ReplayAll()
self.conductor.migration_get_unconfirmed_by_dest_compute(self.context,
'fake-window',
'fake-host')
def test_migration_get_in_progress_by_host_and_node(self):
self.mox.StubOutWithMock(db,
'migration_get_in_progress_by_host_and_node')
db.migration_get_in_progress_by_host_and_node(
self.context, 'fake-host', 'fake-node').AndReturn('fake-result')
self.mox.ReplayAll()
result = self.conductor.migration_get_in_progress_by_host_and_node(
self.context, 'fake-host', 'fake-node')
self.assertEqual(result, 'fake-result')
def test_migration_create(self):
inst = {'uuid': 'fake-uuid',
'host': 'fake-host',
'node': 'fake-node'}
self.mox.StubOutWithMock(db, 'migration_create')
db.migration_create(self.context.elevated(),
{'instance_uuid': inst['uuid'],
'source_compute': inst['host'],
'source_node': inst['node'],
'fake-key': 'fake-value'}).AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.migration_create(self.context, inst,
{'fake-key': 'fake-value'})
self.assertEqual(result, 'result')
def test_migration_update(self):
migration = db.migration_create(self.context.elevated(),
{'instance_uuid': 'fake-uuid',
'status': 'migrating'})
migration_p = jsonutils.to_primitive(migration)
migration = self.conductor.migration_update(self.context, migration_p,
'finished')
self.assertEqual(migration['status'], 'finished')
def test_instance_get_by_uuid(self):
orig_instance = self._create_fake_instance()
copy_instance = self.conductor.instance_get_by_uuid(
self.context, orig_instance['uuid'])
self.assertEqual(orig_instance['name'],
copy_instance['name'])
def _setup_aggregate_with_host(self):
aggregate_ref = db.aggregate_create(self.context.elevated(),
{'name': 'foo'}, metadata={'availability_zone': 'foo'})
self.conductor.aggregate_host_add(self.context, aggregate_ref, 'bar')
aggregate_ref = db.aggregate_get(self.context.elevated(),
aggregate_ref['id'])
return aggregate_ref
def test_aggregate_host_add(self):
aggregate_ref = self._setup_aggregate_with_host()
self.assertTrue(any([host == 'bar'
for host in aggregate_ref['hosts']]))
db.aggregate_delete(self.context.elevated(), aggregate_ref['id'])
def test_aggregate_host_delete(self):
aggregate_ref = self._setup_aggregate_with_host()
self.conductor.aggregate_host_delete(self.context, aggregate_ref,
'bar')
aggregate_ref = db.aggregate_get(self.context.elevated(),
aggregate_ref['id'])
self.assertFalse(any([host == 'bar'
for host in aggregate_ref['hosts']]))
db.aggregate_delete(self.context.elevated(), aggregate_ref['id'])
def test_aggregate_get(self):
aggregate_ref = self._setup_aggregate_with_host()
aggregate = self.conductor.aggregate_get(self.context,
aggregate_ref['id'])
self.assertEqual(jsonutils.to_primitive(aggregate_ref), aggregate)
db.aggregate_delete(self.context.elevated(), aggregate_ref['id'])
def test_aggregate_get_by_host(self):
self._setup_aggregate_with_host()
aggregates = self.conductor.aggregate_get_by_host(self.context, 'bar')
self.assertEqual(aggregates[0]['availability_zone'], 'foo')
def test_aggregate_metadata_add(self):
aggregate = {'name': 'fake aggregate', 'id': 'fake-id'}
metadata = {'foo': 'bar'}
self.mox.StubOutWithMock(db, 'aggregate_metadata_add')
db.aggregate_metadata_add(
mox.IgnoreArg(), aggregate['id'], metadata, False).AndReturn(
metadata)
self.mox.ReplayAll()
result = self.conductor.aggregate_metadata_add(self.context,
aggregate,
metadata)
self.assertEqual(result, metadata)
def test_aggregate_metadata_delete(self):
aggregate = {'name': 'fake aggregate', 'id': 'fake-id'}
self.mox.StubOutWithMock(db, 'aggregate_metadata_delete')
db.aggregate_metadata_delete(mox.IgnoreArg(), aggregate['id'], 'fake')
self.mox.ReplayAll()
result = self.conductor.aggregate_metadata_delete(self.context,
aggregate,
'fake')
def test_aggregate_metadata_get_by_host(self):
self.mox.StubOutWithMock(db, 'aggregate_metadata_get_by_host')
db.aggregate_metadata_get_by_host(self.context, 'host',
'key').AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.aggregate_metadata_get_by_host(self.context,
'host', 'key')
self.assertEqual(result, 'result')
def test_bw_usage_update(self):
self.mox.StubOutWithMock(db, 'bw_usage_update')
self.mox.StubOutWithMock(db, 'bw_usage_get')
update_args = (self.context, 'uuid', 'mac', 0, 10, 20, 5, 10, 20)
get_args = (self.context, 'uuid', 0, 'mac')
db.bw_usage_update(*update_args, update_cells=True)
db.bw_usage_get(*get_args).AndReturn('foo')
self.mox.ReplayAll()
result = self.conductor.bw_usage_update(*update_args)
self.assertEqual(result, 'foo')
def test_security_group_get_by_instance(self):
fake_instance = {'id': 'fake-instance'}
self.mox.StubOutWithMock(db, 'security_group_get_by_instance')
db.security_group_get_by_instance(
self.context, fake_instance['id']).AndReturn('it worked')
self.mox.ReplayAll()
result = self.conductor.security_group_get_by_instance(self.context,
fake_instance)
self.assertEqual(result, 'it worked')
def test_security_group_rule_get_by_security_group(self):
fake_secgroup = {'id': 'fake-secgroup'}
self.mox.StubOutWithMock(db,
'security_group_rule_get_by_security_group')
db.security_group_rule_get_by_security_group(
self.context, fake_secgroup['id']).AndReturn('it worked')
self.mox.ReplayAll()
result = self.conductor.security_group_rule_get_by_security_group(
self.context, fake_secgroup)
self.assertEqual(result, 'it worked')
def test_provider_fw_rule_get_all(self):
fake_rules = ['a', 'b', 'c']
self.mox.StubOutWithMock(db, 'provider_fw_rule_get_all')
db.provider_fw_rule_get_all(self.context).AndReturn(fake_rules)
self.mox.ReplayAll()
result = self.conductor.provider_fw_rule_get_all(self.context)
self.assertEqual(result, fake_rules)
def test_agent_build_get_by_triple(self):
self.mox.StubOutWithMock(db, 'agent_build_get_by_triple')
db.agent_build_get_by_triple(self.context, 'fake-hv', 'fake-os',
'fake-arch').AndReturn('it worked')
self.mox.ReplayAll()
result = self.conductor.agent_build_get_by_triple(self.context,
'fake-hv',
'fake-os',
'fake-arch')
self.assertEqual(result, 'it worked')
def test_block_device_mapping_get_all_by_instance(self):
fake_inst = {'uuid': 'fake-uuid'}
self.mox.StubOutWithMock(db,
'block_device_mapping_get_all_by_instance')
db.block_device_mapping_get_all_by_instance(
self.context, fake_inst['uuid']).AndReturn('fake-result')
self.mox.ReplayAll()
result = self.conductor.block_device_mapping_get_all_by_instance(
self.context, fake_inst)
self.assertEqual(result, 'fake-result')
def test_instance_get_all_hung_in_rebooting(self):
self.mox.StubOutWithMock(db, 'instance_get_all_hung_in_rebooting')
db.instance_get_all_hung_in_rebooting(self.context, 123)
self.mox.ReplayAll()
self.conductor.instance_get_all_hung_in_rebooting(self.context, 123)
def test_instance_get_active_by_window_joined(self):
self.mox.StubOutWithMock(db, 'instance_get_active_by_window_joined')
db.instance_get_active_by_window_joined(self.context, 'fake-begin',
'fake-end', 'fake-proj',
'fake-host')
self.mox.ReplayAll()
self.conductor.instance_get_active_by_window_joined(
self.context, 'fake-begin', 'fake-end', 'fake-proj', 'fake-host')
def test_instance_destroy(self):
self.mox.StubOutWithMock(db, 'instance_destroy')
db.instance_destroy(self.context, 'fake-uuid')
self.mox.ReplayAll()
self.conductor.instance_destroy(self.context, {'uuid': 'fake-uuid'})
def test_instance_info_cache_delete(self):
self.mox.StubOutWithMock(db, 'instance_info_cache_delete')
db.instance_info_cache_delete(self.context, 'fake-uuid')
self.mox.ReplayAll()
self.conductor.instance_info_cache_delete(self.context,
{'uuid': 'fake-uuid'})
def test_instance_info_cache_update(self):
fake_values = {'key1': 'val1', 'key2': 'val2'}
fake_instance = {'uuid': 'fake-uuid'}
self.mox.StubOutWithMock(db, 'instance_info_cache_update')
db.instance_info_cache_update(self.context, 'fake-uuid',
fake_values,
update_cells='meow')
self.mox.ReplayAll()
self.conductor.instance_info_cache_update(self.context,
fake_instance,
fake_values,
update_cells='meow')
def test_instance_type_get(self):
self.mox.StubOutWithMock(db, 'instance_type_get')
db.instance_type_get(self.context, 'fake-id').AndReturn('fake-type')
self.mox.ReplayAll()
result = self.conductor.instance_type_get(self.context, 'fake-id')
self.assertEqual(result, 'fake-type')
def test_vol_get_usage_by_time(self):
self.mox.StubOutWithMock(db, 'vol_get_usage_by_time')
db.vol_get_usage_by_time(self.context, 'fake-time').AndReturn(
'fake-usage')
self.mox.ReplayAll()
result = self.conductor.vol_get_usage_by_time(self.context,
'fake-time')
self.assertEqual(result, 'fake-usage')
def test_vol_usage_update(self):
self.mox.StubOutWithMock(db, 'vol_usage_update')
inst = self._create_fake_instance({
'project_id': 'fake-project_id',
'user_id': 'fake-user_id',
})
db.vol_usage_update(self.context, 'fake-vol', 'rd-req', 'rd-bytes',
'wr-req', 'wr-bytes', inst['uuid'],
'fake-project_id', 'fake-user_id', 'fake-az',
'fake-refr', 'fake-bool')
self.mox.ReplayAll()
self.conductor.vol_usage_update(self.context, 'fake-vol', 'rd-req',
'rd-bytes', 'wr-req', 'wr-bytes',
inst, 'fake-refr', 'fake-bool')
def test_compute_node_create(self):
self.mox.StubOutWithMock(db, 'compute_node_create')
db.compute_node_create(self.context, 'fake-values').AndReturn(
'fake-result')
self.mox.ReplayAll()
result = self.conductor.compute_node_create(self.context,
'fake-values')
self.assertEqual(result, 'fake-result')
def test_compute_node_update(self):
node = {'id': 'fake-id'}
self.mox.StubOutWithMock(db, 'compute_node_update')
db.compute_node_update(self.context, node['id'], 'fake-values',
False).AndReturn('fake-result')
self.mox.ReplayAll()
result = self.conductor.compute_node_update(self.context, node,
'fake-values', False)
self.assertEqual(result, 'fake-result')
def test_compute_node_delete(self):
node = {'id': 'fake-id'}
self.mox.StubOutWithMock(db, 'compute_node_delete')
db.compute_node_delete(self.context, node['id']).AndReturn(None)
self.mox.ReplayAll()
result = self.conductor.compute_node_delete(self.context, node)
self.assertEqual(result, None)
def test_instance_fault_create(self):
self.mox.StubOutWithMock(db, 'instance_fault_create')
db.instance_fault_create(self.context, 'fake-values').AndReturn(
'fake-result')
self.mox.ReplayAll()
result = self.conductor.instance_fault_create(self.context,
'fake-values')
self.assertEqual(result, 'fake-result')
def test_task_log_get(self):
self.mox.StubOutWithMock(db, 'task_log_get')
db.task_log_get(self.context, 'task', 'begin', 'end', 'host',
'state').AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.task_log_get(self.context, 'task', 'begin',
'end', 'host', 'state')
self.assertEqual(result, 'result')
def test_task_log_get_with_no_state(self):
self.mox.StubOutWithMock(db, 'task_log_get')
db.task_log_get(self.context, 'task', 'begin', 'end',
'host', None).AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.task_log_get(self.context, 'task', 'begin',
'end', 'host')
self.assertEqual(result, 'result')
def test_task_log_begin_task(self):
self.mox.StubOutWithMock(db, 'task_log_begin_task')
db.task_log_begin_task(self.context.elevated(), 'task', 'begin',
'end', 'host', 'items',
'message').AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.task_log_begin_task(
self.context, 'task', 'begin', 'end', 'host', 'items', 'message')
self.assertEqual(result, 'result')
def test_task_log_end_task(self):
self.mox.StubOutWithMock(db, 'task_log_end_task')
db.task_log_end_task(self.context.elevated(), 'task', 'begin', 'end',
'host', 'errors', 'message').AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.task_log_end_task(
self.context, 'task', 'begin', 'end', 'host', 'errors', 'message')
self.assertEqual(result, 'result')
def test_notify_usage_exists(self):
info = {
'audit_period_beginning': 'start',
'audit_period_ending': 'end',
'bandwidth': {},
'image_meta': {},
'extra': 'info',
}
instance = {
'system_metadata': [],
}
self.mox.StubOutWithMock(notifications, 'audit_period_bounds')
self.mox.StubOutWithMock(compute_utils, 'notify_about_instance_usage')
notifications.audit_period_bounds(False).AndReturn(('start', 'end'))
compute_utils.notify_about_instance_usage(self.context, instance,
'exists',
system_metadata={},
extra_usage_info=info)
self.mox.ReplayAll()
self.conductor.notify_usage_exists(self.context, instance,
system_metadata={},
extra_usage_info=dict(extra='info'))
def test_security_groups_trigger_members_refresh(self):
self.mox.StubOutWithMock(self.conductor_manager.security_group_api,
'trigger_members_refresh')
self.conductor_manager.security_group_api.trigger_members_refresh(
self.context, [1, 2, 3])
self.mox.ReplayAll()
self.conductor.security_groups_trigger_members_refresh(self.context,
[1, 2, 3])
def test_network_migrate_instance_start(self):
self.mox.StubOutWithMock(self.conductor_manager.network_api,
'migrate_instance_start')
self.conductor_manager.network_api.migrate_instance_start(self.context,
'instance',
'migration')
self.mox.ReplayAll()
self.conductor.network_migrate_instance_start(self.context,
'instance',
'migration')
def test_network_migrate_instance_finish(self):
self.mox.StubOutWithMock(self.conductor_manager.network_api,
'migrate_instance_finish')
self.conductor_manager.network_api.migrate_instance_finish(
self.context, 'instance', 'migration')
self.mox.ReplayAll()
self.conductor.network_migrate_instance_finish(self.context,
'instance',
'migration')
def test_quota_commit(self):
self.mox.StubOutWithMock(quota.QUOTAS, 'commit')
quota.QUOTAS.commit(self.context, 'reservations', project_id=None)
quota.QUOTAS.commit(self.context, 'reservations', project_id='proj')
self.mox.ReplayAll()
self.conductor.quota_commit(self.context, 'reservations')
self.conductor.quota_commit(self.context, 'reservations', 'proj')
def test_quota_rollback(self):
self.mox.StubOutWithMock(quota.QUOTAS, 'rollback')
quota.QUOTAS.rollback(self.context, 'reservations', project_id=None)
quota.QUOTAS.rollback(self.context, 'reservations', project_id='proj')
self.mox.ReplayAll()
self.conductor.quota_rollback(self.context, 'reservations')
self.conductor.quota_rollback(self.context, 'reservations', 'proj')
def test_get_ec2_ids(self):
expected = {
'instance-id': 'ec2-inst-id',
'ami-id': 'ec2-ami-id',
'kernel-id': 'ami-kernel-ec2-kernelid',
'ramdisk-id': 'ami-ramdisk-ec2-ramdiskid',
}
inst = {
'uuid': 'fake-uuid',
'kernel_id': 'ec2-kernelid',
'ramdisk_id': 'ec2-ramdiskid',
'image_ref': 'fake-image',
}
self.mox.StubOutWithMock(ec2utils, 'id_to_ec2_inst_id')
self.mox.StubOutWithMock(ec2utils, 'glance_id_to_ec2_id')
self.mox.StubOutWithMock(ec2utils, 'image_type')
ec2utils.id_to_ec2_inst_id(inst['uuid']).AndReturn(
expected['instance-id'])
ec2utils.glance_id_to_ec2_id(self.context,
inst['image_ref']).AndReturn(
expected['ami-id'])
for image_type in ['kernel', 'ramdisk']:
image_id = inst['%s_id' % image_type]
ec2utils.image_type(image_type).AndReturn('ami-' + image_type)
ec2utils.glance_id_to_ec2_id(self.context, image_id,
'ami-' + image_type).AndReturn(
'ami-%s-ec2-%sid' % (image_type, image_type))
self.mox.ReplayAll()
result = self.conductor.get_ec2_ids(self.context, inst)
self.assertEqual(result, expected)
def test_compute_stop(self):
self.mox.StubOutWithMock(self.conductor_manager.compute_api, 'stop')
self.conductor_manager.compute_api.stop(self.context, 'instance', True)
self.mox.ReplayAll()
self.conductor.compute_stop(self.context, 'instance')
def test_compute_confirm_resize(self):
self.mox.StubOutWithMock(self.conductor_manager.compute_api,
'confirm_resize')
self.conductor_manager.compute_api.confirm_resize(self.context,
'instance',
'migration')
self.mox.ReplayAll()
self.conductor.compute_confirm_resize(self.context, 'instance',
'migration')
def test_compute_unrescue(self):
self.mox.StubOutWithMock(self.conductor_manager.compute_api,
'unrescue')
self.conductor_manager.compute_api.unrescue(self.context, 'instance')
self.mox.ReplayAll()
self.conductor.compute_unrescue(self.context, 'instance')
class ConductorTestCase(_BaseTestCase, test.TestCase):
"""Conductor Manager Tests."""
def setUp(self):
super(ConductorTestCase, self).setUp()
self.conductor = conductor_manager.ConductorManager()
self.conductor_manager = self.conductor
def test_block_device_mapping_update_or_create(self):
fake_bdm = {'id': 'fake-id'}
self.mox.StubOutWithMock(db, 'block_device_mapping_create')
self.mox.StubOutWithMock(db, 'block_device_mapping_update')
self.mox.StubOutWithMock(db, 'block_device_mapping_update_or_create')
db.block_device_mapping_create(self.context, fake_bdm)
db.block_device_mapping_update(self.context, fake_bdm['id'], fake_bdm)
db.block_device_mapping_update_or_create(self.context, fake_bdm)
self.mox.ReplayAll()
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm,
create=True)
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm,
create=False)
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm)
def test_block_device_mapping_destroy(self):
fake_bdm = {'id': 'fake-bdm'}
fake_bdm2 = {'id': 'fake-bdm-2'}
fake_inst = {'uuid': 'fake-uuid'}
self.mox.StubOutWithMock(db, 'block_device_mapping_destroy')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_device')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_volume')
db.block_device_mapping_destroy(self.context, 'fake-bdm')
db.block_device_mapping_destroy(self.context, 'fake-bdm-2')
db.block_device_mapping_destroy_by_instance_and_device(self.context,
'fake-uuid',
'fake-device')
db.block_device_mapping_destroy_by_instance_and_volume(self.context,
'fake-uuid',
'fake-volume')
self.mox.ReplayAll()
self.conductor.block_device_mapping_destroy(self.context,
[fake_bdm,
fake_bdm2])
self.conductor.block_device_mapping_destroy(self.context,
instance=fake_inst,
device_name='fake-device')
self.conductor.block_device_mapping_destroy(self.context,
instance=fake_inst,
volume_id='fake-volume')
def test_instance_get_all_by_filters(self):
filters = {'foo': 'bar'}
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
db.instance_get_all_by_filters(self.context, filters,
'fake-key', 'fake-sort',
columns_to_join=None)
self.mox.ReplayAll()
self.conductor.instance_get_all_by_filters(self.context, filters,
'fake-key', 'fake-sort')
def test_instance_get_all_by_host(self):
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_node')
db.instance_get_all_by_host(self.context.elevated(),
'host', None).AndReturn('result')
db.instance_get_all_by_host_and_node(self.context.elevated(), 'host',
'node').AndReturn('result')
self.mox.ReplayAll()
result = self.conductor.instance_get_all_by_host(self.context, 'host')
self.assertEqual(result, 'result')
result = self.conductor.instance_get_all_by_host(self.context, 'host',
'node')
self.assertEqual(result, 'result')
def _test_stubbed(self, name, dbargs, condargs,
db_result_listified=False, db_exception=None):
self.mox.StubOutWithMock(db, name)
if db_exception:
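            # The call is recorded twice on purpose: each assertRaises below
            # consumes one mox expectation (first with RPC client exceptions
            # wrapped, then with them stubbed out).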
getattr(db, name)(self.context, *dbargs).AndRaise(db_exception)
getattr(db, name)(self.context, *dbargs).AndRaise(db_exception)
else:
getattr(db, name)(self.context, *dbargs).AndReturn('fake-result')
self.mox.ReplayAll()
if db_exception:
self.assertRaises(rpc_common.ClientException,
self.conductor.service_get_all_by,
self.context, **condargs)
self.stub_out_client_exceptions()
self.assertRaises(db_exception.__class__,
self.conductor.service_get_all_by,
self.context, **condargs)
else:
result = self.conductor.service_get_all_by(self.context,
**condargs)
if db_result_listified:
self.assertEqual(['fake-result'], result)
else:
self.assertEqual('fake-result', result)
def test_service_get_all(self):
self._test_stubbed('service_get_all', (), {})
def test_service_get_by_host_and_topic(self):
self._test_stubbed('service_get_by_host_and_topic',
('host', 'topic'),
dict(topic='topic', host='host'))
def test_service_get_all_by_topic(self):
self._test_stubbed('service_get_all_by_topic',
('topic',),
dict(topic='topic'))
def test_service_get_all_by_host(self):
self._test_stubbed('service_get_all_by_host',
('host',),
dict(host='host'))
def test_service_get_by_compute_host(self):
self._test_stubbed('service_get_by_compute_host',
('host',),
dict(topic='compute', host='host'),
db_result_listified=True)
def test_service_get_by_args(self):
self._test_stubbed('service_get_by_args',
('host', 'binary'),
dict(host='host', binary='binary'))
def test_service_get_by_compute_host_not_found(self):
self._test_stubbed('service_get_by_compute_host',
('host',),
dict(topic='compute', host='host'),
db_exception=exc.ComputeHostNotFound(host='host'))
def test_service_get_by_args_not_found(self):
self._test_stubbed('service_get_by_args',
('host', 'binary'),
dict(host='host', binary='binary'),
db_exception=exc.HostBinaryNotFound(binary='binary',
host='host'))
def test_security_groups_trigger_handler(self):
self.mox.StubOutWithMock(self.conductor_manager.security_group_api,
'trigger_handler')
self.conductor_manager.security_group_api.trigger_handler('event',
self.context,
'args')
self.mox.ReplayAll()
self.conductor.security_groups_trigger_handler(self.context,
'event', ['args'])
class ConductorRPCAPITestCase(_BaseTestCase, test.TestCase):
"""Conductor RPC API Tests."""
def setUp(self):
super(ConductorRPCAPITestCase, self).setUp()
self.conductor_service = self.start_service(
'conductor', manager='nova.conductor.manager.ConductorManager')
self.conductor_manager = self.conductor_service.manager
self.conductor = conductor_rpcapi.ConductorAPI()
def test_block_device_mapping_update_or_create(self):
fake_bdm = {'id': 'fake-id'}
self.mox.StubOutWithMock(db, 'block_device_mapping_create')
self.mox.StubOutWithMock(db, 'block_device_mapping_update')
self.mox.StubOutWithMock(db, 'block_device_mapping_update_or_create')
db.block_device_mapping_create(self.context, fake_bdm)
db.block_device_mapping_update(self.context, fake_bdm['id'], fake_bdm)
db.block_device_mapping_update_or_create(self.context, fake_bdm)
self.mox.ReplayAll()
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm,
create=True)
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm,
create=False)
self.conductor.block_device_mapping_update_or_create(self.context,
fake_bdm)
def test_block_device_mapping_destroy(self):
fake_bdm = {'id': 'fake-bdm'}
fake_inst = {'uuid': 'fake-uuid'}
self.mox.StubOutWithMock(db, 'block_device_mapping_destroy')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_device')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_volume')
db.block_device_mapping_destroy(self.context, 'fake-bdm')
db.block_device_mapping_destroy_by_instance_and_device(self.context,
'fake-uuid',
'fake-device')
db.block_device_mapping_destroy_by_instance_and_volume(self.context,
'fake-uuid',
'fake-volume')
self.mox.ReplayAll()
self.conductor.block_device_mapping_destroy(self.context,
bdms=[fake_bdm])
self.conductor.block_device_mapping_destroy(self.context,
instance=fake_inst,
device_name='fake-device')
self.conductor.block_device_mapping_destroy(self.context,
instance=fake_inst,
volume_id='fake-volume')
def test_instance_get_all_by_filters(self):
filters = {'foo': 'bar'}
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
db.instance_get_all_by_filters(self.context, filters,
'fake-key', 'fake-sort',
columns_to_join=None)
self.mox.ReplayAll()
self.conductor.instance_get_all_by_filters(self.context, filters,
'fake-key', 'fake-sort')
def _test_stubbed(self, name, dbargs, condargs,
db_result_listified=False, db_exception=None):
self.mox.StubOutWithMock(db, name)
if db_exception:
getattr(db, name)(self.context, *dbargs).AndRaise(db_exception)
else:
getattr(db, name)(self.context, *dbargs).AndReturn('fake-result')
self.mox.ReplayAll()
if db_exception:
self.assertRaises(db_exception.__class__,
self.conductor.service_get_all_by,
self.context, **condargs)
else:
result = self.conductor.service_get_all_by(self.context,
**condargs)
if db_result_listified:
self.assertEqual(['fake-result'], result)
else:
self.assertEqual('fake-result', result)
def test_service_get_all(self):
self._test_stubbed('service_get_all', (), {})
def test_service_get_by_host_and_topic(self):
self._test_stubbed('service_get_by_host_and_topic',
('host', 'topic'),
dict(topic='topic', host='host'))
def test_service_get_all_by_topic(self):
self._test_stubbed('service_get_all_by_topic',
('topic',),
dict(topic='topic'))
def test_service_get_all_by_host(self):
self._test_stubbed('service_get_all_by_host',
('host',),
dict(host='host'))
def test_service_get_by_compute_host(self):
self._test_stubbed('service_get_by_compute_host',
('host',),
dict(topic='compute', host='host'),
db_result_listified=True)
def test_service_get_by_args(self):
self._test_stubbed('service_get_by_args',
('host', 'binary'),
dict(host='host', binary='binary'))
def test_service_get_by_compute_host_not_found(self):
self._test_stubbed('service_get_by_compute_host',
('host',),
dict(topic='compute', host='host'),
db_exception=exc.ComputeHostNotFound(host='host'))
def test_service_get_by_args_not_found(self):
self._test_stubbed('service_get_by_args',
('host', 'binary'),
dict(host='host', binary='binary'),
db_exception=exc.HostBinaryNotFound(binary='binary',
host='host'))
def test_security_groups_trigger_handler(self):
self.mox.StubOutWithMock(self.conductor_manager.security_group_api,
'trigger_handler')
self.conductor_manager.security_group_api.trigger_handler('event',
self.context,
'arg')
self.mox.ReplayAll()
self.conductor.security_groups_trigger_handler(self.context,
'event', ['arg'])
class ConductorAPITestCase(_BaseTestCase, test.TestCase):
"""Conductor API Tests."""
def setUp(self):
super(ConductorAPITestCase, self).setUp()
self.conductor_service = self.start_service(
'conductor', manager='nova.conductor.manager.ConductorManager')
self.conductor = conductor_api.API()
self.conductor_manager = self.conductor_service.manager
self.db = None
def _do_update(self, instance_uuid, **updates):
# NOTE(danms): the public API takes actual keyword arguments,
# so override the base class here to make the call correctly
return self.conductor.instance_update(self.context, instance_uuid,
**updates)
def test_bw_usage_get(self):
self.mox.StubOutWithMock(db, 'bw_usage_update')
self.mox.StubOutWithMock(db, 'bw_usage_get')
get_args = (self.context, 'uuid', 0, 'mac')
db.bw_usage_get(*get_args).AndReturn('foo')
self.mox.ReplayAll()
result = self.conductor.bw_usage_get(*get_args)
self.assertEqual(result, 'foo')
def test_block_device_mapping_update_or_create(self):
self.mox.StubOutWithMock(db, 'block_device_mapping_create')
self.mox.StubOutWithMock(db, 'block_device_mapping_update')
self.mox.StubOutWithMock(db, 'block_device_mapping_update_or_create')
db.block_device_mapping_create(self.context, 'fake-bdm')
db.block_device_mapping_update(self.context,
'fake-id', {'id': 'fake-id'})
db.block_device_mapping_update_or_create(self.context, 'fake-bdm')
self.mox.ReplayAll()
self.conductor.block_device_mapping_create(self.context, 'fake-bdm')
self.conductor.block_device_mapping_update(self.context, 'fake-id', {})
self.conductor.block_device_mapping_update_or_create(self.context,
'fake-bdm')
def test_block_device_mapping_destroy(self):
fake_bdm = {'id': 'fake-bdm'}
fake_inst = {'uuid': 'fake-uuid'}
self.mox.StubOutWithMock(db, 'block_device_mapping_destroy')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_device')
self.mox.StubOutWithMock(
db, 'block_device_mapping_destroy_by_instance_and_volume')
db.block_device_mapping_destroy(self.context, 'fake-bdm')
db.block_device_mapping_destroy_by_instance_and_device(self.context,
'fake-uuid',
'fake-device')
db.block_device_mapping_destroy_by_instance_and_volume(self.context,
'fake-uuid',
'fake-volume')
self.mox.ReplayAll()
self.conductor.block_device_mapping_destroy(self.context, [fake_bdm])
self.conductor.block_device_mapping_destroy_by_instance_and_device(
self.context, fake_inst, 'fake-device')
self.conductor.block_device_mapping_destroy_by_instance_and_volume(
self.context, fake_inst, 'fake-volume')
def test_instance_get_all(self):
        self.mox.StubOutWithMock(db, 'instance_get_all')
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
db.instance_get_all(self.context)
db.instance_get_all_by_filters(self.context, {'name': 'fake-inst'},
'updated_at', 'asc',
columns_to_join=None)
self.mox.ReplayAll()
self.conductor.instance_get_all(self.context)
self.conductor.instance_get_all_by_filters(self.context,
{'name': 'fake-inst'},
'updated_at', 'asc')
def _test_stubbed(self, name, *args, **kwargs):
if args and isinstance(args[0], FakeContext):
ctxt = args[0]
args = args[1:]
else:
ctxt = self.context
db_exception = kwargs.get('db_exception')
self.mox.StubOutWithMock(db, name)
if db_exception:
getattr(db, name)(ctxt, *args).AndRaise(db_exception)
else:
getattr(db, name)(ctxt, *args).AndReturn('fake-result')
if name == 'service_destroy':
# TODO(russellb) This is a hack ... SetUp() starts the conductor()
# service. There is a cleanup step that runs after this test which
# also deletes the associated service record. This involves a call
# to db.service_destroy(), which we have stubbed out.
db.service_destroy(mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
if db_exception:
self.assertRaises(db_exception.__class__,
getattr(self.conductor, name),
self.context, *args)
else:
result = getattr(self.conductor, name)(self.context, *args)
self.assertEqual(
result, 'fake-result' if kwargs.get('returns', True) else None)
def test_service_get_all(self):
self._test_stubbed('service_get_all')
def test_service_get_by_host_and_topic(self):
self._test_stubbed('service_get_by_host_and_topic', 'host', 'topic')
def test_service_get_all_by_topic(self):
self._test_stubbed('service_get_all_by_topic', 'topic')
def test_service_get_all_by_host(self):
self._test_stubbed('service_get_all_by_host', 'host')
def test_service_get_by_compute_host(self):
self._test_stubbed('service_get_by_compute_host', 'host')
def test_service_get_by_args(self):
self._test_stubbed('service_get_by_args', 'host', 'binary')
def test_service_get_by_compute_host_not_found(self):
self._test_stubbed('service_get_by_compute_host', 'host',
db_exception=exc.ComputeHostNotFound(host='host'))
def test_service_get_by_args_not_found(self):
self._test_stubbed('service_get_by_args', 'host', 'binary',
db_exception=exc.HostBinaryNotFound(binary='binary',
host='host'))
def test_service_create(self):
self._test_stubbed('service_create', {})
def test_service_destroy(self):
self._test_stubbed('service_destroy', '', returns=False)
def test_service_update(self):
ctxt = self.context
self.mox.StubOutWithMock(db, 'service_update')
db.service_update(ctxt, '', {}).AndReturn('fake-result')
self.mox.ReplayAll()
result = self.conductor.service_update(self.context, {'id': ''}, {})
self.assertEqual(result, 'fake-result')
def test_instance_get_all_by_host_and_node(self):
self._test_stubbed('instance_get_all_by_host_and_node',
self.context.elevated(), 'host', 'node')
def test_instance_get_all_by_host(self):
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_node')
db.instance_get_all_by_host(self.context.elevated(), 'host',
None).AndReturn('fake-result')
self.mox.ReplayAll()
result = self.conductor.instance_get_all_by_host(self.context,
'host')
self.assertEqual(result, 'fake-result')
def test_wait_until_ready(self):
timeouts = []
calls = dict(count=0)
def fake_ping(context, message, timeout):
timeouts.append(timeout)
calls['count'] += 1
if calls['count'] < 15:
raise rpc_common.Timeout("fake")
self.stubs.Set(self.conductor.base_rpcapi, 'ping', fake_ping)
self.conductor.wait_until_ready(self.context)
self.assertEqual(timeouts.count(10), 10)
self.assertTrue(None in timeouts)
def test_security_groups_trigger_handler(self):
self.mox.StubOutWithMock(self.conductor_manager.security_group_api,
'trigger_handler')
self.conductor_manager.security_group_api.trigger_handler('event',
self.context,
'arg')
self.mox.ReplayAll()
self.conductor.security_groups_trigger_handler(self.context,
'event', 'arg')
class ConductorLocalAPITestCase(ConductorAPITestCase):
"""Conductor LocalAPI Tests."""
def setUp(self):
super(ConductorLocalAPITestCase, self).setUp()
self.conductor = conductor_api.LocalAPI()
self.conductor_manager = self.conductor._manager._target
self.db = db
def test_client_exceptions(self):
instance = self._create_fake_instance()
# NOTE(danms): The LocalAPI should not raise exceptions wrapped
# in ClientException. KeyError should be raised if an invalid
# update key is passed, so use that to validate.
self.assertRaises(KeyError,
self._do_update, instance['uuid'], foo='bar')
def test_wait_until_ready(self):
# Override test in ConductorAPITestCase
pass
class ConductorImportTest(test.TestCase):
def test_import_conductor_local(self):
self.flags(use_local=True, group='conductor')
self.assertTrue(isinstance(conductor.API(),
conductor_api.LocalAPI))
def test_import_conductor_rpc(self):
self.flags(use_local=False, group='conductor')
self.assertTrue(isinstance(conductor.API(),
conductor_api.API))
def test_import_conductor_override_to_local(self):
self.flags(use_local=False, group='conductor')
self.assertTrue(isinstance(conductor.API(use_local=True),
conductor_api.LocalAPI))
class ConductorPolicyTest(test.TestCase):
def test_all_allowed_keys(self):
def fake_db_instance_update(self, *args, **kwargs):
return None, None
self.stubs.Set(db, 'instance_update_and_get_original',
fake_db_instance_update)
ctxt = context.RequestContext('fake-user', 'fake-project')
conductor = conductor_api.LocalAPI()
updates = {}
for key in conductor_manager.allowed_updates:
if key in conductor_manager.datetime_fields:
updates[key] = timeutils.utcnow()
else:
updates[key] = 'foo'
conductor.instance_update(ctxt, 'fake-instance', **updates)
def test_allowed_keys_are_real(self):
instance = models.Instance()
keys = list(conductor_manager.allowed_updates)
# NOTE(danms): expected_task_state is a parameter that gets
# passed to the db layer, but is not actually an instance attribute
del keys[keys.index('expected_task_state')]
for key in keys:
self.assertTrue(hasattr(instance, key))
|
erja-gp/openthread | refs/heads/master | tools/harness-automation/cases/leader_9_2_1.py | 16 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class Leader_9_2_1(HarnessCase):
role = HarnessCase.ROLE_LEADER
case = '9 2 1'
golden_devices_required = 1
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
|
elky/django | refs/heads/master | django/core/cache/backends/locmem.py | 11 | "Thread-safe in-memory cache backend."
import pickle
import time
from contextlib import contextmanager
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.utils.synch import RWLock
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}
_expire_info = {}
_locks = {}
@contextmanager
def dummy():
"""A context manager that does nothing special."""
yield
class LocMemCache(BaseCache):
def __init__(self, name, params):
BaseCache.__init__(self, params)
self._cache = _caches.setdefault(name, {})
self._expire_info = _expire_info.setdefault(name, {})
self._lock = _locks.setdefault(name, RWLock())
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
with self._lock.writer():
if self._has_expired(key):
self._set(key, pickled, timeout)
return True
return False
def get(self, key, default=None, version=None, acquire_lock=True):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = None
with (self._lock.reader() if acquire_lock else dummy()):
if not self._has_expired(key):
pickled = self._cache[key]
if pickled is not None:
try:
return pickle.loads(pickled)
except pickle.PickleError:
return default
with (self._lock.writer() if acquire_lock else dummy()):
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return default
def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
if len(self._cache) >= self._max_entries:
self._cull()
self._cache[key] = value
self._expire_info[key] = self.get_backend_timeout(timeout)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
with self._lock.writer():
self._set(key, pickled, timeout)
def incr(self, key, delta=1, version=None):
with self._lock.writer():
value = self.get(key, version=version, acquire_lock=False)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
key = self.make_key(key, version=version)
pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
self._cache[key] = pickled
return new_value
def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.reader():
if not self._has_expired(key):
return True
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return False
def _has_expired(self, key):
exp = self._expire_info.get(key, -1)
if exp is None or exp > time.time():
return False
return True
def _cull(self):
if self._cull_frequency == 0:
self.clear()
else:
doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
for k in doomed:
self._delete(k)
def _delete(self, key):
try:
del self._cache[key]
except KeyError:
pass
try:
del self._expire_info[key]
except KeyError:
pass
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
self._delete(key)
def clear(self):
self._cache.clear()
self._expire_info.clear()
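
# Usage sketch (illustrative; in a Django project this backend is normally
# selected via settings.CACHES rather than instantiated directly):
#
#   cache = LocMemCache('unique-snowflake', {})
#   cache.set('greeting', 'hello', timeout=30)
#   assert cache.get('greeting') == 'hello'
#   cache.delete('greeting')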
|
cruzegoodin/TSC-ShippingDetails | refs/heads/master | flask/lib/python2.7/site-packages/pytz/tzfile.py | 480 | #!/usr/bin/env python
'''
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
'''
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from datetime import datetime, timedelta
from struct import unpack, calcsize
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
from pytz.tzinfo import memorized_datetime, memorized_timedelta
def _byte_string(s):
"""Cast a string or byte string to an ASCII byte string."""
return s.encode('US-ASCII')
_NULL = _byte_string('\0')
def _std_string(s):
"""Cast a string or byte string to an ASCII string."""
return str(s.decode('US-ASCII'))
def build_tzinfo(zone, fp):
head_fmt = '>4s c 15x 6l'
head_size = calcsize(head_fmt)
    (magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt,
typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
# Make sure it is a tzfile(5) file
assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
# Read out the transition times, localtime indices and ttinfo structures.
data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
data_size = calcsize(data_fmt)
data = unpack(data_fmt, fp.read(data_size))
# make sure we unpacked the right number of values
assert len(data) == 2 * timecnt + 3 * typecnt + 1
transitions = [memorized_datetime(trans)
for trans in data[:timecnt]]
lindexes = list(data[timecnt:2 * timecnt])
ttinfo_raw = data[2 * timecnt:-1]
tznames_raw = data[-1]
del data
# Process ttinfo into separate structs
ttinfo = []
tznames = {}
i = 0
while i < len(ttinfo_raw):
# have we looked up this timezone name yet?
tzname_offset = ttinfo_raw[i+2]
if tzname_offset not in tznames:
nul = tznames_raw.find(_NULL, tzname_offset)
if nul < 0:
nul = len(tznames_raw)
tznames[tzname_offset] = _std_string(
tznames_raw[tzname_offset:nul])
ttinfo.append((ttinfo_raw[i],
bool(ttinfo_raw[i+1]),
tznames[tzname_offset]))
i += 3
# Now build the timezone object
if len(transitions) == 0:
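        # No-op tuple expression; presumably kept as a sanity check that
        # raises IndexError here if the file contained no ttinfo entries.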
ttinfo[0][0], ttinfo[0][2]
cls = type(zone, (StaticTzInfo,), dict(
zone=zone,
_utcoffset=memorized_timedelta(ttinfo[0][0]),
_tzname=ttinfo[0][2]))
else:
# Early dates use the first standard time ttinfo
i = 0
while ttinfo[i][1]:
i += 1
if ttinfo[i] == ttinfo[lindexes[0]]:
transitions[0] = datetime.min
else:
transitions.insert(0, datetime.min)
lindexes.insert(0, i)
# calculate transition info
transition_info = []
for i in range(len(transitions)):
inf = ttinfo[lindexes[i]]
utcoffset = inf[0]
if not inf[1]:
dst = 0
else:
for j in range(i-1, -1, -1):
prev_inf = ttinfo[lindexes[j]]
if not prev_inf[1]:
break
dst = inf[0] - prev_inf[0] # dst offset
# Bad dst? Look further. DST > 24 hours happens when
                # a timezone has moved across the international dateline.
if dst <= 0 or dst > 3600*3:
for j in range(i+1, len(transitions)):
stdinf = ttinfo[lindexes[j]]
if not stdinf[1]:
dst = inf[0] - stdinf[0]
if dst > 0:
break # Found a useful std time.
tzname = inf[2]
# Round utcoffset and dst to the nearest minute or the
# datetime library will complain. Conversions to these timezones
# might be up to plus or minus 30 seconds out, but it is
# the best we can do.
utcoffset = int((utcoffset + 30) // 60) * 60
dst = int((dst + 30) // 60) * 60
transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
cls = type(zone, (DstTzInfo,), dict(
zone=zone,
_utc_transition_times=transitions,
_transition_info=transition_info))
return cls()
if __name__ == '__main__':
import os.path
from pprint import pprint
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
tz = build_tzinfo('Australia/Melbourne',
open(os.path.join(base,'Australia','Melbourne'), 'rb'))
tz = build_tzinfo('US/Eastern',
open(os.path.join(base,'US','Eastern'), 'rb'))
pprint(tz._utc_transition_times)
#print tz.asPython(4)
#print tz.transitions_mapping
|
bgat/linux-multi-v7 | refs/heads/master | tools/perf/scripts/python/check-perf-trace.py | 1997 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
zmallen/pygraylog | refs/heads/master | pygraylog/graylogapi.py | 1 | import base64
import requests
from endpoints import endpoints
from urlparse import urlparse
class GraylogAPI(object):
def __init__(self, url, username=None, password=None, api_key=None):
        # username/password and api_key are mutually exclusive
        if username is not None and api_key is not None:
            raise ValueError('username/password and api_key cannot be provided at the same time')
self.url = url
self._path = urlparse(self.url).path
self.username = username
self.password = password
self.api_key = api_key
self.methods = {
'get': self._get,
'post': self._post,
'put': self._put,
'delete': self._delete
}
def _(self, name):
url = self.url + '/' + name
return GraylogAPI(url, username=self.username, password=self.password,
api_key=self.api_key)
def __getattr__(self, name):
if name in self.methods.keys():
def method(**kwargs):
res = self.call(name, **kwargs)
return res
return method
else:
return self._(name)
def build_auth_header(self):
# use the api_key if it is been provided
if self.api_key is not None:
payload = self.api_key + ':token'
else:
payload = self.username + ':' + self.password
header = {
'Authorization' : 'Basic ' + base64.b64encode(payload),
'Accept' : 'application/json'
}
return header
def _get(self, **kwargs):
headers = self.build_auth_header()
r = requests.get(self.url, params=kwargs, headers=headers)
return r.text
def _post(self, **kwargs):
raise NotImplementedError('POST not implemented')
def _put(self, **kwargs):
raise NotImplementedError('PUT not implemented')
def _delete(self, **kwargs):
raise NotImplementedError('DELETE not implemented')
def call(self, method, **kwargs):
arg_names = kwargs.keys()
required_args = endpoints[self._path]
if not set(required_args).issubset(set(arg_names)):
raise ValueError(('Not all required arguments passed for %s.\n' +
'Given: %s\nRequired: %s')
% (self._path, arg_names, required_args))
for arg in required_args:
if arg in kwargs and arg[-1] == '_':
kwargs[arg[:-1]] = kwargs.pop(arg)
res = self.methods[method](**kwargs)
return res
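
# Usage sketch (illustrative; valid paths and their required arguments are
# defined by the endpoints mapping, so 'count.total' below merely shows the
# attribute-chaining style):
#
#   api = GraylogAPI('http://graylog.example.org:12900',
#                    username='admin', password='secret')
#   result = api.count.total.get()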
|
mne-tools/mne-tools.github.io | refs/heads/main | 0.18/_downloads/8ec5aedda7e79549d1003f83773d85a3/plot_montage.py | 2 | # -*- coding: utf-8 -*-
"""
.. _plot_montage:
Plotting sensor layouts of EEG Systems
======================================
This example illustrates how to load all the EEG system montages
shipped in MNE-python, and display it on fsaverage template.
""" # noqa: D205, D400
# Authors: Alexandre Gramfort <[email protected]>
# Joan Massich <[email protected]>
#
# License: BSD Style.
from mayavi import mlab
import os.path as op
import mne
from mne.channels.montage import get_builtin_montages
from mne.datasets import fetch_fsaverage
from mne.viz import plot_alignment
subjects_dir = op.dirname(fetch_fsaverage())
###############################################################################
# check all montages
#
for current_montage in get_builtin_montages():
montage = mne.channels.read_montage(current_montage,
unit='auto',
transform=False)
info = mne.create_info(ch_names=montage.ch_names,
sfreq=1,
ch_types='eeg',
montage=montage)
fig = plot_alignment(info, trans=None,
subject='fsaverage',
subjects_dir=subjects_dir,
eeg=['projected'],
)
mlab.view(135, 80)
mlab.title(montage.kind, figure=fig)
|
vasekhodina/blivet_gv_visualization | refs/heads/master | tests/test_visualizer.py | 1 | # No test functions, as the visualizer doesn't have anything worth testing.
|
bwhite/hadoopy | refs/heads/master | hadoopy/thirdparty/pyinstaller/PyInstaller/hooks/hook-PyQt4.QtGui.py | 3 | hiddenimports = ['sip', 'PyQt4.QtCore', 'PyQt4._qt']
from PyInstaller.hooks.hookutils import qt4_plugins_binaries
def hook(mod):
mod.binaries.extend(qt4_plugins_binaries('accessible'))
mod.binaries.extend(qt4_plugins_binaries('iconengines'))
mod.binaries.extend(qt4_plugins_binaries('imageformats'))
mod.binaries.extend(qt4_plugins_binaries('inputmethods'))
mod.binaries.extend(qt4_plugins_binaries('graphicssystems'))
return mod
|
popazerty/openblackhole-SH4 | refs/heads/master | lib/python/Components/Task.py | 55 | # A Job consists of many "Tasks".
# A task is the run of an external tool, with proper methods for failure handling
from Tools.CList import CList
class Job(object):
NOT_STARTED, IN_PROGRESS, FINISHED, FAILED = range(4)
def __init__(self, name):
self.tasks = [ ]
self.resident_tasks = [ ]
self.workspace = "/tmp"
self.current_task = 0
self.callback = None
self.name = name
self.finished = False
self.end = 100
self.__progress = 0
self.weightScale = 1
self.afterEvent = None
self.state_changed = CList()
self.status = self.NOT_STARTED
self.onSuccess = None
# description is a dict
def fromDescription(self, description):
pass
def createDescription(self):
return None
def getProgress(self):
if self.current_task == len(self.tasks):
return self.end
t = self.tasks[self.current_task]
jobprogress = t.weighting * t.progress / float(t.end) + sum([task.weighting for task in self.tasks[:self.current_task]])
return int(jobprogress*self.weightScale)
progress = property(getProgress)
def getStatustext(self):
return { self.NOT_STARTED: _("Waiting"), self.IN_PROGRESS: _("In progress"), self.FINISHED: _("Finished"), self.FAILED: _("Failed") }[self.status]
def task_progress_changed_CB(self):
self.state_changed()
def addTask(self, task):
task.job = self
task.task_progress_changed = self.task_progress_changed_CB
self.tasks.append(task)
def start(self, callback):
assert self.callback is None
self.callback = callback
self.restart()
def restart(self):
self.status = self.IN_PROGRESS
self.state_changed()
self.runNext()
sumTaskWeightings = sum([t.weighting for t in self.tasks]) or 1
self.weightScale = self.end / float(sumTaskWeightings)
def runNext(self):
if self.current_task == len(self.tasks):
if len(self.resident_tasks) == 0:
self.status = self.FINISHED
self.state_changed()
self.callback(self, None, [])
self.callback = None
else:
print "still waiting for %d resident task(s) %s to finish" % (len(self.resident_tasks), str(self.resident_tasks))
else:
self.tasks[self.current_task].run(self.taskCallback)
self.state_changed()
def taskCallback(self, task, res, stay_resident = False):
cb_idx = self.tasks.index(task)
if stay_resident:
if cb_idx not in self.resident_tasks:
self.resident_tasks.append(self.current_task)
print "task going resident:", task
else:
print "task keeps staying resident:", task
return
if len(res):
print ">>> Error:", res
self.status = self.FAILED
self.state_changed()
self.callback(self, task, res)
if cb_idx != self.current_task:
if cb_idx in self.resident_tasks:
print "resident task finished:", task
self.resident_tasks.remove(cb_idx)
if not res:
self.state_changed()
self.current_task += 1
self.runNext()
def retry(self):
assert self.status == self.FAILED
self.restart()
def abort(self):
if self.current_task < len(self.tasks):
self.tasks[self.current_task].abort()
for i in self.resident_tasks:
self.tasks[i].abort()
def cancel(self):
self.abort()
def __str__(self):
return "Components.Task.Job name=%s #tasks=%s" % (self.name, len(self.tasks))
class Task(object):
def __init__(self, job, name):
self.name = name
self.immediate_preconditions = [ ]
self.global_preconditions = [ ]
self.postconditions = [ ]
self.returncode = None
self.initial_input = None
self.job = None
self.end = 100
self.weighting = 100
self.__progress = 0
self.cmd = None
self.cwd = "/tmp"
self.args = [ ]
self.cmdline = None
self.task_progress_changed = None
self.output_line = ""
job.addTask(self)
self.container = None
def setCommandline(self, cmd, args):
self.cmd = cmd
self.args = args
def setTool(self, tool):
self.cmd = tool
self.args = [tool]
self.global_preconditions.append(ToolExistsPrecondition())
self.postconditions.append(ReturncodePostcondition())
def setCmdline(self, cmdline):
self.cmdline = cmdline
def checkPreconditions(self, immediate = False):
not_met = [ ]
if immediate:
preconditions = self.immediate_preconditions
else:
preconditions = self.global_preconditions
for precondition in preconditions:
if not precondition.check(self):
not_met.append(precondition)
return not_met
def _run(self):
if (self.cmd is None) and (self.cmdline is None):
self.finish()
return
from enigma import eConsoleAppContainer
self.container = eConsoleAppContainer()
self.container.appClosed.append(self.processFinished)
self.container.stdoutAvail.append(self.processStdout)
self.container.stderrAvail.append(self.processStderr)
if self.cwd is not None:
self.container.setCWD(self.cwd)
if not self.cmd and self.cmdline:
print "execute:", self.container.execute(self.cmdline), self.cmdline
else:
assert self.cmd is not None
assert len(self.args) >= 1
print "execute:", self.container.execute(self.cmd, *self.args), ' '.join(self.args)
if self.initial_input:
self.writeInput(self.initial_input)
def run(self, callback):
failed_preconditions = self.checkPreconditions(True) + self.checkPreconditions(False)
if failed_preconditions:
print "[Task] preconditions failed"
callback(self, failed_preconditions)
return
self.callback = callback
try:
self.prepare()
self._run()
except Exception, ex:
print "[Task] exception:", ex
self.postconditions = [FailedPostcondition(ex)]
self.finish()
def prepare(self):
pass
def cleanup(self, failed):
pass
def processStdout(self, data):
self.processOutput(data)
def processStderr(self, data):
self.processOutput(data)
def processOutput(self, data):
self.output_line += data
while True:
i = self.output_line.find('\n')
if i == -1:
break
self.processOutputLine(self.output_line[:i+1])
self.output_line = self.output_line[i+1:]
def processOutputLine(self, line):
print "[Task %s]" % self.name, line[:-1]
def processFinished(self, returncode):
self.returncode = returncode
self.finish()
def abort(self):
if self.container:
self.container.kill()
self.finish(aborted = True)
def finish(self, aborted = False):
self.afterRun()
not_met = [ ]
if aborted:
not_met.append(AbortedPostcondition())
else:
for postcondition in self.postconditions:
if not postcondition.check(self):
not_met.append(postcondition)
self.cleanup(not_met)
self.callback(self, not_met)
def afterRun(self):
pass
def writeInput(self, input):
self.container.write(input)
def getProgress(self):
return self.__progress
def setProgress(self, progress):
if progress > self.end:
progress = self.end
if progress < 0:
progress = 0
self.__progress = progress
if self.task_progress_changed:
self.task_progress_changed()
progress = property(getProgress, setProgress)
def __str__(self):
return "Components.Task.Task name=%s" % self.name
class LoggingTask(Task):
def __init__(self, job, name):
Task.__init__(self, job, name)
self.log = []
def processOutput(self, data):
print "[%s]" % self.name, data,
self.log.append(data)
class PythonTask(Task):
def _run(self):
from twisted.internet import threads
from enigma import eTimer
self.aborted = False
self.pos = 0
threads.deferToThread(self.work).addBoth(self.onComplete)
self.timer = eTimer()
self.timer.callback.append(self.onTimer)
self.timer.start(5)
def work(self):
		raise NotImplementedError("work")
def abort(self):
self.aborted = True
if self.callback is None:
self.finish(aborted = True)
def onTimer(self):
self.setProgress(self.pos)
def onComplete(self, result):
self.postconditions.append(FailedPostcondition(result))
self.timer.stop()
del self.timer
self.finish()
class ConditionTask(Task):
"""
Reactor-driven pthread_condition.
Wait for something to happen. Call trigger when something occurs that
is likely to make check() return true. Raise exception in check() to
signal error.
Default is to call trigger() once per second, override prepare/cleanup
to do something else (like waiting for hotplug)...
"""
def __init__(self, job, name, timeoutCount=None):
Task.__init__(self, job, name)
self.timeoutCount = timeoutCount
def _run(self):
self.triggerCount = 0
def prepare(self):
from enigma import eTimer
self.timer = eTimer()
self.timer.callback.append(self.trigger)
self.timer.start(1000)
def cleanup(self, failed):
if hasattr(self, 'timer'):
self.timer.stop()
del self.timer
def check(self):
# override to return True only when condition triggers
return True
def trigger(self):
self.triggerCount += 1
try:
if (self.timeoutCount is not None) and (self.triggerCount > self.timeoutCount):
raise Exception, "Timeout elapsed, sorry"
res = self.check()
except Exception, e:
self.postconditions.append(FailedPostcondition(e))
res = True
if res:
self.finish()
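#Example (illustrative, not part of the original module): a ConditionTask
#subclass that polls for a mount point once per second via the default
#timer, giving up after 30 triggers.
#
#class WaitForMountTask(ConditionTask):
#	def __init__(self, job, mountpoint):
#		ConditionTask.__init__(self, job, "Waiting for mount", timeoutCount=30)
#		self.mountpoint = mountpoint
#
#	def check(self):
#		import os
#		return os.path.ismount(self.mountpoint)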
# The jobmanager will execute multiple jobs, each after another.
# later, it will also support suspending jobs (and continuing them after reboot etc)
# It also supports a notification when some error occurred, and possibly a retry.
class JobManager:
def __init__(self):
self.active_jobs = [ ]
self.failed_jobs = [ ]
self.job_classes = [ ]
self.in_background = False
self.visible = False
self.active_job = None
# Set onSuccess to popupTaskView to get a visible notification.
# onFail defaults to notifyFailed which tells the user that it went south.
def AddJob(self, job, onSuccess=None, onFail=None):
job.onSuccess = onSuccess
if onFail is None:
job.onFail = self.notifyFailed
else:
job.onFail = onFail
self.active_jobs.append(job)
self.kick()
def kick(self):
if self.active_job is None:
if self.active_jobs:
self.active_job = self.active_jobs.pop(0)
self.active_job.start(self.jobDone)
def notifyFailed(self, job, task, problems):
from Tools import Notifications
from Screens.MessageBox import MessageBox
if problems[0].RECOVERABLE:
Notifications.AddNotificationWithCallback(self.errorCB, MessageBox, _("Error: %s\nRetry?") % (problems[0].getErrorMessage(task)))
return True
else:
Notifications.AddNotification(MessageBox, job.name + "\n" + _("Error") + ': %s' % (problems[0].getErrorMessage(task)), type = MessageBox.TYPE_ERROR )
return False
def jobDone(self, job, task, problems):
print "job", job, "completed with", problems, "in", task
if problems:
if not job.onFail(job, task, problems):
self.errorCB(False)
else:
self.active_job = None
if job.onSuccess:
job.onSuccess(job)
self.kick()
# Set job.onSuccess to this function if you want to pop up the jobview when the job is done/
def popupTaskView(self, job):
if not self.visible:
from Tools import Notifications
from Screens.TaskView import JobView
self.visible = True
Notifications.AddNotification(JobView, job)
def errorCB(self, answer):
if answer:
print "retrying job"
self.active_job.retry()
else:
print "not retrying job."
self.failed_jobs.append(self.active_job)
self.active_job = None
self.kick()
def getPendingJobs(self):
list = [ ]
if self.active_job:
list.append(self.active_job)
list += self.active_jobs
return list
# some examples:
#class PartitionExistsPostcondition:
# def __init__(self, device):
# self.device = device
#
# def check(self, task):
# import os
# return os.access(self.device + "part1", os.F_OK)
#
#class CreatePartitionTask(Task):
# def __init__(self, device):
# Task.__init__(self, "Creating partition")
# self.device = device
# self.setTool("/sbin/sfdisk")
# self.args += ["-f", self.device + "disc"]
# self.initial_input = "0,\n;\n;\n;\ny\n"
# self.postconditions.append(PartitionExistsPostcondition(self.device))
#
#class CreateFilesystemTask(Task):
# def __init__(self, device, partition = 1, largefile = True):
# Task.__init__(self, "Creating filesystem")
# self.setTool("/sbin/mkfs.ext")
# if largefile:
# self.args += ["-T", "largefile"]
# self.args.append("-m0")
# self.args.append(device + "part%d" % partition)
#
#class FilesystemMountTask(Task):
# def __init__(self, device, partition = 1, filesystem = "ext3"):
# Task.__init__(self, "Mounting filesystem")
# self.setTool("/bin/mount")
# if filesystem is not None:
# self.args += ["-t", filesystem]
# self.args.append(device + "part%d" % partition)
class Condition:
def __init__(self):
pass
RECOVERABLE = False
def getErrorMessage(self, task):
return _("An unknown error occurred!") + " (%s @ task %s)" % (self.__class__.__name__, task.__class__.__name__)
class WorkspaceExistsPrecondition(Condition):
def __init__(self):
pass
	def check(self, task):
		import os
		return os.access(task.job.workspace, os.W_OK)
class DiskspacePrecondition(Condition):
def __init__(self, diskspace_required):
self.diskspace_required = diskspace_required
self.diskspace_available = 0
def check(self, task):
import os
try:
s = os.statvfs(task.job.workspace)
self.diskspace_available = s.f_bsize * s.f_bavail
return self.diskspace_available >= self.diskspace_required
except OSError:
return False
def getErrorMessage(self, task):
return _("Not enough disk space. Please free up some disk space and try again. (%d MB required, %d MB available)") % (self.diskspace_required / 1024 / 1024, self.diskspace_available / 1024 / 1024)
class ToolExistsPrecondition(Condition):
def __init__(self):
pass
def check(self, task):
import os
if task.cmd[0]=='/':
self.realpath = task.cmd
print "[Task.py][ToolExistsPrecondition] WARNING: usage of absolute paths for tasks should be avoided!"
return os.access(self.realpath, os.X_OK)
else:
self.realpath = task.cmd
path = os.environ.get('PATH', '').split(os.pathsep)
path.append(task.cwd + '/')
absolutes = filter(lambda file: os.access(file, os.X_OK), map(lambda directory, file = task.cmd: os.path.join(directory, file), path))
if absolutes:
self.realpath = absolutes[0]
return True
return False
def getErrorMessage(self, task):
return _("A required tool (%s) was not found.") % self.realpath
class AbortedPostcondition(Condition):
def __init__(self):
pass
def getErrorMessage(self, task):
return "Cancelled upon user request"
class ReturncodePostcondition(Condition):
def __init__(self):
pass
def check(self, task):
return task.returncode == 0
def getErrorMessage(self, task):
if hasattr(task, 'log') and task.log:
log = ''.join(task.log).strip()
log = log.split('\n')[-3:]
log = '\n'.join(log)
return log
else:
return _("Error code") + ": %s" % task.returncode
class FailedPostcondition(Condition):
def __init__(self, exception):
self.exception = exception
def getErrorMessage(self, task):
if isinstance(self.exception, int):
if hasattr(task, 'log'):
log = ''.join(task.log).strip()
log = log.split('\n')[-4:]
log = '\n'.join(log)
return log
else:
return _("Error code") + " %s" % self.exception
return str(self.exception)
def check(self, task):
return (self.exception is None) or (self.exception == 0)
#class HDDInitJob(Job):
# def __init__(self, device):
# Job.__init__(self, _("Initialize Harddisk"))
# self.device = device
# self.fromDescription(self.createDescription())
#
# def fromDescription(self, description):
# self.device = description["device"]
# self.addTask(CreatePartitionTask(self.device))
# self.addTask(CreateFilesystemTask(self.device))
# self.addTask(FilesystemMountTask(self.device))
#
# def createDescription(self):
# return {"device": self.device}
job_manager = JobManager()
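
#Example (illustrative only): running a minimal one-task job through the
#global job_manager; assumes the 'echo' binary exists on the target system.
#
#job = Job(_("Example job"))
#task = Task(job, "say hello")
#task.setTool("echo")
#task.args += ["hello"]
#job_manager.AddJob(job, onSuccess=job_manager.popupTaskView)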
|
TimYi/django | refs/heads/master | tests/m2m_through/tests.py | 295 | from __future__ import unicode_literals
from datetime import datetime
from operator import attrgetter
from django.test import TestCase
from .models import (
CustomMembership, Employee, Event, Friendship, Group, Ingredient,
Invitation, Membership, Person, PersonSelfRefM2M, Recipe, RecipeIngredient,
Relationship,
)
class M2mThroughTests(TestCase):
def setUp(self):
self.bob = Person.objects.create(name='Bob')
self.jim = Person.objects.create(name='Jim')
self.jane = Person.objects.create(name='Jane')
self.rock = Group.objects.create(name='Rock')
self.roll = Group.objects.create(name='Roll')
def test_retrieve_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
expected = ['Jane', 'Jim']
self.assertQuerysetEqual(
self.rock.members.all(),
expected,
attrgetter("name")
)
def test_get_on_intermediate_model(self):
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.get(person=self.jane, group=self.rock)
self.assertEqual(
repr(queryset),
'<Membership: Jane is a member of Rock>'
)
def test_filter_on_intermediate_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.filter(group=self.rock)
expected = [
'<Membership: Jim is a member of Rock>',
'<Membership: Jane is a member of Rock>',
]
self.assertQuerysetEqual(
queryset,
expected
)
def test_cannot_use_add_on_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.add(self.bob)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_create_on_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.create(name='Annie')
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_remove_on_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.remove(self.jim)
self.assertQuerysetEqual(
self.rock.members.all(),
['Jim', ],
attrgetter("name")
)
def test_cannot_use_setattr_on_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Person.objects.filter(name__in=['Bob', 'Jim']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.rock, 'members', members)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_clear_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
self.rock.members.clear()
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_retrieve_reverse_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
expected = ['Rock', 'Roll']
self.assertQuerysetEqual(
self.jim.group_set.all(),
expected,
attrgetter("name")
)
def test_cannot_use_add_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.add(self.bob)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.create(name='Funk')
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_remove_on_reverse_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.bob, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.remove(self.rock)
self.assertQuerysetEqual(
self.bob.group_set.all(),
['Rock', ],
attrgetter('name')
)
def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Group.objects.filter(name__in=['Rock', 'Roll']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.bob, 'group_set', members)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_clear_on_reverse_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
self.jim.group_set.clear()
self.assertQuerysetEqual(
self.jim.group_set.all(),
[]
)
def test_query_model_by_attribute_name_of_related_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Group.objects.filter(members__name='Bob'),
['Roll', ],
attrgetter("name")
)
def test_query_first_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Group.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Roll'],
attrgetter("name")
)
def test_query_second_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Person.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Jane'],
attrgetter("name")
)
def test_query_model_by_related_model_name(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Person.objects.filter(group__name="Rock"),
['Jane', 'Jim'],
attrgetter("name")
)
def test_query_model_by_custom_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
Person.objects.filter(custom__name="Rock"),
['Bob', 'Jim'],
attrgetter("name")
)
def test_query_model_by_intermediate_can_return_non_unique_queryset(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(
person=self.jane, group=self.rock,
date_joined=datetime(2006, 1, 1)
)
Membership.objects.create(
person=self.bob, group=self.roll,
date_joined=datetime(2004, 1, 1))
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(
person=self.jane, group=self.roll,
date_joined=datetime(2004, 1, 1))
qs = Person.objects.filter(
membership__date_joined__gt=datetime(2004, 1, 1)
)
self.assertQuerysetEqual(
qs,
['Jane', 'Jim', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_forward_empty_qs(self):
self.assertQuerysetEqual(
self.rock.custom_members.all(),
[]
)
def test_custom_related_name_reverse_empty_qs(self):
self.assertQuerysetEqual(
self.bob.custom.all(),
[]
)
def test_custom_related_name_forward_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.rock.custom_members.all(),
['Bob', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_reverse_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom.all(),
['Rock'],
attrgetter("name")
)
def test_custom_related_name_doesnt_conflict_with_fky_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom_person_related_name.all(),
['<CustomMembership: Bob is a member of Rock>']
)
def test_through_fields(self):
"""
Tests that relations with intermediary tables with multiple FKs
to the M2M's ``to`` model are possible.
"""
event = Event.objects.create(title='Rockwhale 2014')
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jim)
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jane)
self.assertQuerysetEqual(
event.invitees.all(),
['Jane', 'Jim'],
attrgetter('name')
)
class M2mThroughReferentialTests(TestCase):
def test_self_referential_empty_qs(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
self.assertQuerysetEqual(
tony.friends.all(),
[]
)
    def test_self_referential_non_symmetrical_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
    def test_self_referential_non_symmetrical_second_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
    def test_self_referential_non_symmetrical_clear_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
chris.friends.clear()
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
# Since this isn't a symmetrical relation, Tony's friend link still exists.
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
    def test_self_referential_symmetrical(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
Friendship.objects.create(
first=chris, second=tony, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
self.assertQuerysetEqual(
chris.friends.all(),
['Tony'],
attrgetter("name")
)
def test_through_fields_self_referential(self):
john = Employee.objects.create(name='john')
peter = Employee.objects.create(name='peter')
mary = Employee.objects.create(name='mary')
harry = Employee.objects.create(name='harry')
Relationship.objects.create(source=john, target=peter, another=None)
Relationship.objects.create(source=john, target=mary, another=None)
Relationship.objects.create(source=john, target=harry, another=peter)
self.assertQuerysetEqual(
john.subordinates.all(),
['peter', 'mary', 'harry'],
attrgetter('name')
)
class M2mThroughToFieldsTests(TestCase):
def setUp(self):
self.pea = Ingredient.objects.create(iname='pea')
self.potato = Ingredient.objects.create(iname='potato')
self.tomato = Ingredient.objects.create(iname='tomato')
self.curry = Recipe.objects.create(rname='curry')
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.potato)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.pea)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.tomato)
def test_retrieval(self):
# Forward retrieval
self.assertQuerysetEqual(
self.curry.ingredients.all(),
[self.pea, self.potato, self.tomato], lambda x: x
)
# Backward retrieval
self.assertEqual(self.tomato.recipes.get(), self.curry)
def test_choices(self):
field = Recipe._meta.get_field('ingredients')
self.assertEqual(
[choice[0] for choice in field.get_choices(include_blank=False)],
['pea', 'potato', 'tomato']
)
|
prasen-ftech/pywinauto | refs/heads/master | examples/notepad_fast.py | 20 | # GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"Run some automations to test things"
__revision__ = "$Revision: 214 $"
import time
try:
from pywinauto import application
except ImportError:
import os.path
pywinauto_path = os.path.abspath(__file__)
pywinauto_path = os.path.split(os.path.split(pywinauto_path)[0])[0]
import sys
sys.path.append(pywinauto_path)
from pywinauto import application
import pywinauto
from pywinauto import application
from pywinauto import tests
#from pywinauto.findbestmatch import MatchError
from pywinauto.timings import Timings
def RunNotepad():
"Run notepad and do some small stuff with it"
print "Run with option 'language' e.g. notepad_fast.py language to use"
print "application data. This should work on any language Windows/Notepad"
print
print "Trying fast timing settings - it's possible these won't work"
print "if pywinauto tries to access a window that is not accessible yet"
# use fast timings - but allow to wait for windows a long time
Timings.Fast()
Timings.window_find_timeout = 10
start = time.time()
run_with_appdata = False
if len(sys.argv) > 1 and sys.argv[1].lower() == 'language':
run_with_appdata = True
scriptdir = os.path.split(os.path.abspath(__file__))[0]
if run_with_appdata:
print "\nRunning this script so it will load application data and run"
print "against any lanuguage version of Notepad/Windows"
# make sure that the app data gets read from the same folder as
# the script
app = application.Application(
os.path.join(scriptdir, "Notepad_fast.pkl"))
else:
app = application.Application()
    ## for distribution we don't want to connect to anybody's application
## because we may mess up something they are working on!
#try:
# app.connect_(path = ur"c:\windows\system32\notepad.exe")
#except application.ProcessNotFoundError:
# app.start_(ur"c:\windows\system32\notepad.exe")
app.start_(ur"notepad.exe")
app.Notepad.MenuSelect("File->PageSetup")
# ----- Page Setup Dialog ----
# Select the 4th combobox item
app.PageSetupDlg.SizeComboBox.Select(4)
    # Select the 'Letter' combobox item, falling back to the full item text
try:
app.PageSetupDlg.SizeComboBox.Select("Letter")
except ValueError:
app.PageSetupDlg.SizeComboBox.Select('Letter (8.5" x 11")')
app.PageSetupDlg.SizeComboBox.Select(2)
# run some tests on the Dialog. List of available tests:
# "AllControls",
# "AsianHotkey",
# "ComboBoxDroppedHeight",
# "CompareToRefFont",
# "LeadTrailSpaces",
# "MiscValues",
# "Missalignment",
# "MissingExtraString",
# "Overlapping",
# "RepeatedHotkey",
# "Translation",
# "Truncation",
bugs = app.PageSetupDlg.RunTests('RepeatedHotkey Truncation')
# if there are any bugs they will be printed to the console
# and the controls will be highlighted
tests.print_bugs(bugs)
# ----- Next Page Setup Dialog ----
app.PageSetupDlg.Printer.Click()
# do some radio button clicks
# Open the Connect to printer dialog so we can
# try out checking/unchecking a checkbox
app.PageSetupDlg.Network.Click()
# ----- Connect To Printer Dialog ----
# Select a checkbox
app.ConnectToPrinter.ExpandByDefault.Check()
app.ConnectToPrinter.ExpandByDefault.UnCheck()
# try doing the same by using click
app.ConnectToPrinter.ExpandByDefault.Click()
app.ConnectToPrinter.ExpandByDefault.Click()
# close the dialog
app.ConnectToPrinter.Cancel.CloseClick()
# ----- 2nd Page Setup Dialog again ----
app.PageSetupDlg.Properties.Click()
doc_props = app.window_(title_re = ".*Properties$")
doc_props.Wait('exists', timeout = 40)
#
# # ----- Document Properties Dialog ----
# # some tab control selections
# # Two ways of selecting tabs with indices...
# doc_props.TabCtrl.Select(0)
# doc_props.TabCtrl.Select(1)
# try:
# doc_props.TabCtrl.Select(2)
# except IndexError:
# # not all users have 3 tabs in this dialog
# pass
#
# # or with text...
# #doc_props.TabCtrl.Select("PaperQuality")
# doc_props.TabCtrl.Select(1)
#
# try:
# #doc_props.TabCtrl.Select("JobRetention")
# doc_props.TabCtrl.Select("3")
# except MatchError:
# # some people do not have the "Job Retention" tab
# pass
#
# doc_props.TabCtrl.Select("Finishing")
# #doc_props.TabCtrl.Select(0)
#
# # do some radio button clicks
# doc_props.RotatedLandscape.Click()
# doc_props.BackToFront.Click()
# doc_props.FlipOnShortEdge.Click()
#
# doc_props.Portrait.Click()
# doc_props._None.Click()
# #doc_props.FrontToBack.Click()
#
# # open the Advanced options dialog in two steps
# advbutton = doc_props.Advanced
# advbutton.Click()
#
# # close the 4 windows
#
# # ----- Advanced Options Dialog ----
# app.window_(title_re = ".* Advanced Options").Ok.Click()
# ----- Document Properties Dialog again ----
doc_props.Cancel.CloseClick()
# for some reason my current printer driver
# window does not close cleanly :(
if doc_props.Cancel.Exists():
doc_props.OK.CloseClick()
# ----- 2nd Page Setup Dialog again ----
app.PageSetupDlg.OK.CloseClick()
# ----- Page Setup Dialog ----
app.PageSetupDlg.Ok.CloseClick()
# type some text - note that extended characters ARE allowed
app.Notepad.Edit.SetEditText(u"I am typing s\xe4me text to Notepad\r\n\r\n"
"And then I am going to quit")
app.Notepad.Edit.RightClick()
app.Popup.MenuItem("Right To Left Reading Order").Click()
#app.PopupMenu.MenuSelect("Paste", app.Notepad.ctrl_())
#app.Notepad.Edit.RightClick()
#app.PopupMenu.MenuSelect(
# "Right To Left Reading Order", app.Notepad.ctrl_())
#app.PopupMenu.MenuSelect(
# "Show unicode control characters", app.Notepad.ctrl_())
#time.sleep(1)
#app.Notepad.Edit.RightClick()
#app.PopupMenu.MenuSelect("Right To Left Reading Order", app.Notepad.ctrl_())
#time.sleep(1)
#app.Notepad.Edit.RightClick()
#app.PopupMenu.MenuSelect(
# "Insert Unicode control character -> IAFS", app.Notepad.ctrl_())
#time.sleep(1)
#app.Notepad.Edit.TypeKeys("{ESC}")
    # the following shows that SendText does not accept
# accented characters - but does allow 'control' characters
app.Notepad.Edit.TypeKeys(u"{END}{ENTER}SendText d\xf6\xe9s "
u"s\xfcpp\xf4rt \xe0cce\xf1ted characters!!!", with_spaces = True)
# Try and save
app.Notepad.MenuSelect("File->SaveAs")
app.SaveAs.EncodingComboBox.Select("UTF-8")
app.SaveAs.FileNameEdit.SetEditText("Example-utf8.txt")
app.SaveAs.Save.CloseClick()
# my machine has a weird problem - when connected to the network
# the SaveAs Dialog appears - but doing anything with it can
# cause a LONG delay - the easiest thing is to just wait
# until the dialog is no longer active
# - Dialog might just be gone - because click worked
# - dialog might be waiting to disappear
# so can't wait for next dialog or for it to be disabled
# - dialog might be waiting to display message box so can't wait
# for it to be gone or for the main dialog to be enabled.
    # while the dialog exists wait up to 30 seconds (and yes it can
# take that long on my computer sometimes :-( )
app.SaveAsDialog2.Cancel.WaitNot('enabled')
# If file exists - it asks you if you want to overwrite
try:
app.SaveAs.Yes.Wait('exists').CloseClick()
except pywinauto.MatchError:
pass
# exit notepad
app.Notepad.MenuSelect("File->Exit")
if not run_with_appdata:
app.WriteAppData(os.path.join(scriptdir, "Notepad_fast.pkl"))
print "That took %.3f to run"% (time.time() - start)
if __name__ == "__main__":
RunNotepad()
|
hujiajie/chromium-crosswalk | refs/heads/master | tools/android/loading/log_requests.py | 7 | #! /usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Loads a URL on an Android device, logging all the requests made to do it
to a JSON file using DevTools.
"""
import contextlib
import httplib
import json
import logging
import optparse
import os
import sys
_SRC_DIR = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', '..'))
sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil'))
from devil.android import device_utils
sys.path.append(os.path.join(_SRC_DIR, 'build', 'android'))
import devil_chromium
sys.path.append(os.path.join(_SRC_DIR, 'tools', 'telemetry'))
from telemetry.internal.backends.chrome_inspector import inspector_websocket
from telemetry.internal.backends.chrome_inspector import websocket
sys.path.append(os.path.join(_SRC_DIR, 'tools', 'chrome_proxy'))
from common import inspector_network
import device_setup
class AndroidRequestsLogger(object):
"""Logs all the requests made to load a page on a device."""
def __init__(self, device):
"""If device is None, we connect to a local chrome session."""
self.device = device
self._please_stop = False
self._main_frame_id = None
self._tracing_data = []
def _PageDataReceived(self, msg):
"""Called when a Page event is received.
Records the main frame, and stops the recording once it has finished
loading.
Args:
msg: (dict) Message sent by DevTools.
"""
if 'params' not in msg:
return
params = msg['params']
method = msg.get('method', None)
if method == 'Page.frameStartedLoading' and self._main_frame_id is None:
self._main_frame_id = params['frameId']
elif (method == 'Page.frameStoppedLoading'
and params['frameId'] == self._main_frame_id):
self._please_stop = True
def _TracingDataReceived(self, msg):
self._tracing_data.append(msg)
def _LogPageLoadInternal(self, url, clear_cache):
"""Returns the collection of requests made to load a given URL.
Assumes that DevTools is available on http://localhost:DEVTOOLS_PORT.
Args:
url: URL to load.
clear_cache: Whether to clear the HTTP cache.
Returns:
[inspector_network.InspectorNetworkResponseData, ...]
"""
self._main_frame_id = None
self._please_stop = False
r = httplib.HTTPConnection(
device_setup.DEVTOOLS_HOSTNAME, device_setup.DEVTOOLS_PORT)
r.request('GET', '/json')
response = r.getresponse()
if response.status != 200:
logging.error('Cannot connect to the remote target.')
return None
json_response = json.loads(response.read())
r.close()
websocket_url = json_response[0]['webSocketDebuggerUrl']
ws = inspector_websocket.InspectorWebsocket()
ws.Connect(websocket_url)
inspector = inspector_network.InspectorNetwork(ws)
if clear_cache:
inspector.ClearCache()
ws.SyncRequest({'method': 'Page.enable'})
ws.RegisterDomain('Page', self._PageDataReceived)
inspector.StartMonitoringNetwork()
ws.SendAndIgnoreResponse({'method': 'Page.navigate',
'params': {'url': url}})
while not self._please_stop:
try:
ws.DispatchNotifications()
except websocket.WebSocketTimeoutException as e:
logging.warning('Exception: ' + str(e))
break
if not self._please_stop:
logging.warning('Finished with timeout instead of page load')
inspector.StopMonitoringNetwork()
return inspector.GetResponseData()
def _LogTracingInternal(self, url):
self._main_frame_id = None
self._please_stop = False
r = httplib.HTTPConnection('localhost', device_setup.DEVTOOLS_PORT)
r.request('GET', '/json')
response = r.getresponse()
if response.status != 200:
logging.error('Cannot connect to the remote target.')
return None
json_response = json.loads(response.read())
r.close()
websocket_url = json_response[0]['webSocketDebuggerUrl']
ws = inspector_websocket.InspectorWebsocket()
ws.Connect(websocket_url)
ws.RegisterDomain('Tracing', self._TracingDataReceived)
logging.warning('Tracing.start: ' +
str(ws.SyncRequest({'method': 'Tracing.start',
'options': 'zork'})))
ws.SendAndIgnoreResponse({'method': 'Page.navigate',
'params': {'url': url}})
while not self._please_stop:
try:
ws.DispatchNotifications()
except websocket.WebSocketTimeoutException:
break
if not self._please_stop:
logging.warning('Finished with timeout instead of page load')
return {'events': self._tracing_data,
'end': ws.SyncRequest({'method': 'Tracing.end'})}
def LogPageLoad(self, url, clear_cache, package):
"""Returns the collection of requests made to load a given URL on a device.
Args:
url: (str) URL to load on the device.
clear_cache: (bool) Whether to clear the HTTP cache.
Returns:
See _LogPageLoadInternal().
"""
return device_setup.SetUpAndExecute(
self.device, package,
lambda: self._LogPageLoadInternal(url, clear_cache))
def LogTracing(self, url):
"""Log tracing events from a load of the given URL.
TODO(mattcary): This doesn't work. It would be best to log tracing
simultaneously with network requests, but as that wasn't working the tracing
logging was broken out separately. It still doesn't work...
"""
return device_setup.SetUpAndExecute(
self.device, 'chrome', lambda: self._LogTracingInternal(url))
def _ResponseDataToJson(data):
"""Converts a list of inspector_network.InspectorNetworkResponseData to JSON.
Args:
data: as returned by _LogPageLoad()
Returns:
A JSON file with the following format:
[request1, request2, ...], and a request is:
{'status': str, 'headers': dict, 'request_headers': dict,
'timestamp': double, 'timing': dict, 'url': str,
'served_from_cache': bool, 'initiator': str})
"""
result = []
for r in data:
result.append({'status': r.status,
'headers': r.headers,
'request_headers': r.request_headers,
'timestamp': r.timestamp,
'timing': r.timing,
'url': r.url,
'served_from_cache': r.served_from_cache,
'initiator': r.initiator})
return json.dumps(result)
def _CreateOptionParser():
"""Returns the option parser for this tool."""
parser = optparse.OptionParser(description='Starts a browser on an Android '
'device, gathers the requests made to load a '
'page and dumps it to a JSON file.')
parser.add_option('--url', help='URL to load.',
default='https://www.google.com', metavar='URL')
parser.add_option('--output', help='Output file.', default='result.json')
parser.add_option('--no-clear-cache', help=('Do not clear the HTTP cache '
'before loading the URL.'),
default=True, action='store_false', dest='clear_cache')
parser.add_option('--package', help='Package info for chrome build. '
'See build/android/pylib/constants.',
default='chrome')
parser.add_option('--local', action='store_true', default=False,
help='Connect to local chrome session rather than android.')
return parser
def main():
logging.basicConfig(level=logging.WARNING)
parser = _CreateOptionParser()
options, _ = parser.parse_args()
devil_chromium.Initialize()
if options.local:
device = None
else:
devices = device_utils.DeviceUtils.HealthyDevices()
device = devices[0]
request_logger = AndroidRequestsLogger(device)
response_data = request_logger.LogPageLoad(
options.url, options.clear_cache, options.package)
json_data = _ResponseDataToJson(response_data)
with open(options.output, 'w') as f:
f.write(json_data)
if __name__ == '__main__':
main()
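# Example invocations (illustrative; assumes an attached Android device unless
# --local is given, and that the devil/telemetry imports above resolve inside
# a Chromium checkout):
#   python log_requests.py --url https://www.google.com --output result.json
#   python log_requests.py --local --no-clear-cache --output cached.json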
|
11craft/django-waffle | refs/heads/master | waffle/management/commands/switch.py | 4 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from waffle.models import Switch
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-l', '--list',
action='store_true', dest='list_switch', default=False,
            help='List existing switches.'),
make_option('--create',
action='store_true',
dest='create',
default=False,
help="If the switch doesn't exist, create it."),
)
help = 'Activate or deactivate a switch.'
args = '<switch_name> <on/off>'
def handle(self, switch_name=None, state=None, *args, **options):
list_switch = options['list_switch']
if list_switch:
print 'Switches:'
for switch in Switch.objects.iterator():
print switch.name, 'on' if switch.active else 'off'
return
if not (switch_name and state):
raise CommandError('You need to specify a switch name and state.')
        if state not in ['on', 'off']:
raise CommandError('You need to specify state of switch with '
'"on" or "off".')
if options['create']:
switch, created = Switch.objects.get_or_create(name=switch_name)
if created:
print 'Creating switch: %s' % switch_name
else:
try:
switch = Switch.objects.get(name=switch_name)
except Switch.DoesNotExist:
raise CommandError("This switch doesn't exist.")
switch.active = state == "on"
switch.save()
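# Example usage (illustrative; 'my_feature' is a hypothetical switch name):
#   ./manage.py switch my_feature on --create
#   ./manage.py switch my_feature off
#   ./manage.py switch --list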
|
slapec/bangoo | refs/heads/master | bangoo/theming/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
rapidhere/rpbtman_autosign | refs/heads/master | pytz/zoneinfo/MST.py | 9 | '''tzinfo timezone information for MST.'''
from pytz.tzinfo import StaticTzInfo
from pytz.tzinfo import memorized_timedelta as timedelta
class MST(StaticTzInfo):
'''MST timezone definition. See datetime.tzinfo for details'''
zone = 'MST'
_utcoffset = timedelta(seconds=-25200)
_tzname = 'MST'
MST = MST()
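# Illustrative use of this static zone (no DST transitions are modelled):
#   from datetime import datetime
#   MST.utcoffset(datetime(2009, 1, 1))  # timedelta(hours=-7)
#   MST.tzname(datetime(2009, 1, 1))     # 'MST'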
|
FractalBrew/GoogleMusicChannel.bundle | refs/heads/master | Contents/Libraries/Shared/lib2to3/tests/__init__.py | 308 | """Make tests/ into a package. This allows us to "import tests" and
have tests.all_tests be a TestSuite representing all test cases
from all test_*.py files in tests/."""
# Author: Collin Winter
import os
import os.path
import unittest
import types
from . import support
all_tests = unittest.TestSuite()
tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests')
tests = [t[0:-3] for t in os.listdir(tests_dir)
if t.startswith('test_') and t.endswith('.py')]
loader = unittest.TestLoader()
for t in tests:
__import__("",globals(),locals(),[t],level=1)
mod = globals()[t]
all_tests.addTests(loader.loadTestsFromModule(mod))
|
RudoCris/horizon | refs/heads/master | openstack_dashboard/dashboards/admin/networks/agents/tables.py | 61 | # Copyright 2014 Kylincloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from horizon.utils import filters as utils_filters
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class DeleteDHCPAgent(tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete DHCP Agent",
u"Delete DHCP Agents",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted DHCP Agent",
u"Deleted DHCP Agents",
count
)
policy_rules = (("network", "delete_agent"),)
def delete(self, request, obj_id):
network_id = self.table.kwargs['network_id']
try:
api.neutron.remove_network_from_dhcp_agent(request, obj_id,
network_id)
except Exception as e:
msg = _('Failed to delete agent: %s') % e
LOG.info(msg)
redirect = reverse('horizon:admin:networks:detail',
args=[network_id])
exceptions.handle(request, msg, redirect=redirect)
class AddDHCPAgent(tables.LinkAction):
name = "add"
verbose_name = _("Add DHCP Agent")
url = "horizon:admin:networks:adddhcpagent"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "update_agent"),)
def get_link_url(self, datum=None):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id,))
def get_agent_status(agent):
if agent.admin_state_up:
return _('Enabled')
return _('Disabled')
def get_agent_state(agent):
if agent.alive:
return _('Up')
return _('Down')
class DHCPAgentsTable(tables.DataTable):
id = tables.Column('id', verbose_name=_('ID'), hidden=True)
host = tables.Column('host', verbose_name=_('Host'))
status = tables.Column(get_agent_status, verbose_name=_('Status'))
state = tables.Column(get_agent_state, verbose_name=_('Admin State'))
heartbeat_timestamp = tables.Column('heartbeat_timestamp',
verbose_name=_('Updated At'),
filters=(utils_filters.parse_isotime,
filters.timesince))
class Meta(object):
name = "agents"
verbose_name = _("DHCP Agents")
table_actions = (AddDHCPAgent, DeleteDHCPAgent)
row_actions = (DeleteDHCPAgent,)
hidden_title = False
|
ArcherSys/ArcherSys | refs/heads/master | eclipse/plugins/org.python.pydev.jython_4.5.5.201603221110/Lib/encodings/iso8859_13.py | 593 | """ Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-13',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u201d' # 0xA1 -> RIGHT DOUBLE QUOTATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\u201e' # 0xA5 -> DOUBLE LOW-9 QUOTATION MARK
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xd8' # 0xA8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u0156' # 0xAA -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xc6' # 0xAF -> LATIN CAPITAL LETTER AE
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u201c' # 0xB4 -> LEFT DOUBLE QUOTATION MARK
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xf8' # 0xB8 -> LATIN SMALL LETTER O WITH STROKE
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\u0157' # 0xBA -> LATIN SMALL LETTER R WITH CEDILLA
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xe6' # 0xBF -> LATIN SMALL LETTER AE
u'\u0104' # 0xC0 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u012e' # 0xC1 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u0100' # 0xC2 -> LATIN CAPITAL LETTER A WITH MACRON
u'\u0106' # 0xC3 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\u0118' # 0xC6 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u0112' # 0xC7 -> LATIN CAPITAL LETTER E WITH MACRON
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0179' # 0xCA -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\u0116' # 0xCB -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\u0122' # 0xCC -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u0136' # 0xCD -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\u012a' # 0xCE -> LATIN CAPITAL LETTER I WITH MACRON
u'\u013b' # 0xCF -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u0160' # 0xD0 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\u0145' # 0xD2 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\u014c' # 0xD4 -> LATIN CAPITAL LETTER O WITH MACRON
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\u0172' # 0xD8 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\u0141' # 0xD9 -> LATIN CAPITAL LETTER L WITH STROKE
u'\u015a' # 0xDA -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u016a' # 0xDB -> LATIN CAPITAL LETTER U WITH MACRON
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u017b' # 0xDD -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u017d' # 0xDE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\u0105' # 0xE0 -> LATIN SMALL LETTER A WITH OGONEK
u'\u012f' # 0xE1 -> LATIN SMALL LETTER I WITH OGONEK
u'\u0101' # 0xE2 -> LATIN SMALL LETTER A WITH MACRON
u'\u0107' # 0xE3 -> LATIN SMALL LETTER C WITH ACUTE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0119' # 0xE6 -> LATIN SMALL LETTER E WITH OGONEK
u'\u0113' # 0xE7 -> LATIN SMALL LETTER E WITH MACRON
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u017a' # 0xEA -> LATIN SMALL LETTER Z WITH ACUTE
u'\u0117' # 0xEB -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\u0123' # 0xEC -> LATIN SMALL LETTER G WITH CEDILLA
u'\u0137' # 0xED -> LATIN SMALL LETTER K WITH CEDILLA
u'\u012b' # 0xEE -> LATIN SMALL LETTER I WITH MACRON
u'\u013c' # 0xEF -> LATIN SMALL LETTER L WITH CEDILLA
u'\u0161' # 0xF0 -> LATIN SMALL LETTER S WITH CARON
u'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0146' # 0xF2 -> LATIN SMALL LETTER N WITH CEDILLA
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\u014d' # 0xF4 -> LATIN SMALL LETTER O WITH MACRON
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\u0173' # 0xF8 -> LATIN SMALL LETTER U WITH OGONEK
u'\u0142' # 0xF9 -> LATIN SMALL LETTER L WITH STROKE
u'\u015b' # 0xFA -> LATIN SMALL LETTER S WITH ACUTE
u'\u016b' # 0xFB -> LATIN SMALL LETTER U WITH MACRON
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u017e' # 0xFE -> LATIN SMALL LETTER Z WITH CARON
u'\u2019' # 0xFF -> RIGHT SINGLE QUOTATION MARK
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
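# Illustrative round-trip once the codec is registered (Python 2 semantics):
#   u'\u0104'.encode('iso8859-13')  # '\xc0' (A WITH OGONEK, see table above)
#   '\xff'.decode('iso8859-13')     # u'\u2019' (RIGHT SINGLE QUOTATION MARK)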
|
jk1/intellij-community | refs/heads/master | python/lib/Lib/compiler/misc.py | 100 |
def flatten(tup):
elts = []
for elt in tup:
if isinstance(elt, tuple):
elts = elts + flatten(elt)
else:
elts.append(elt)
return elts
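# Illustrative: nested tuples are flattened depth-first into a flat list, e.g.
#   flatten((1, (2, (3, 4)), 5))  ->  [1, 2, 3, 4, 5]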
class Set:
def __init__(self):
self.elts = {}
def __len__(self):
return len(self.elts)
def __contains__(self, elt):
return self.elts.has_key(elt)
def add(self, elt):
self.elts[elt] = elt
def elements(self):
return self.elts.keys()
def has_elt(self, elt):
return self.elts.has_key(elt)
def remove(self, elt):
del self.elts[elt]
def copy(self):
c = Set()
c.elts.update(self.elts)
return c
class Stack:
def __init__(self):
self.stack = []
self.pop = self.stack.pop
def __len__(self):
return len(self.stack)
def push(self, elt):
self.stack.append(elt)
def top(self):
return self.stack[-1]
def __getitem__(self, index): # needed by visitContinue()
return self.stack[index]
MANGLE_LEN = 256 # magic constant from compile.c
def mangle(name, klass):
if not name.startswith('__'):
return name
if len(name) + 2 >= MANGLE_LEN:
return name
if name.endswith('__'):
return name
try:
i = 0
while klass[i] == '_':
i = i + 1
except IndexError:
return name
klass = klass[i:]
tlen = len(klass) + len(name)
if tlen > MANGLE_LEN:
klass = klass[:MANGLE_LEN-tlen]
return "_%s%s" % (klass, name)
def set_filename(filename, tree):
"""Set the filename attribute to filename on every node in tree"""
worklist = [tree]
while worklist:
node = worklist.pop(0)
node.filename = filename
worklist.extend(node.getChildNodes())
|
dknlght/dkodi | refs/heads/master | src/script.module.urlresolver/lib/urlresolver/plugins/twitchtv.py | 1 | """
Plugin for UrlResolver
Copyright (C) 2016 anxdpanic
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from urlresolver.plugins.lib import helpers
from urlresolver.common import i18n
from urlresolver.resolver import UrlResolver, ResolverError
try:
from twitch.api import usher
from twitch import queries
from twitch.exceptions import ResourceUnavailableException
except ImportError:
usher = None
class TwitchResolver(UrlResolver):
name = 'twitch'
domains = ['twitch.tv']
pattern = r'https?://(?:www\.)?(twitch\.tv)/(.+?)(?:\?|$)'
exclusion_pattern = r'^https?://(?:www\.)?twitch\.tv/(?:directory|user|p|jobs|store|login|products|search|.+?/profile|videos/all)(?:[?/].*)?$'
def get_media_url(self, host, media_id):
queries.CLIENT_ID = self.get_setting('client_id')
videos = None
if media_id.count('/') == 0:
try:
videos = usher.live(media_id)
except ResourceUnavailableException as e:
raise ResolverError(e.message)
else:
url = self.get_url(host, media_id)
video_id = self._extract_video(url)
            if video_id:
                try:
                    videos = usher.video(video_id)
                except ResourceUnavailableException as e:
                    raise ResolverError(e.message)
if videos:
if 'error' in videos:
raise ResolverError('[%s] %s' % (str(videos['status']), videos['message']))
sources = [(source['name'], source['url']) for source in videos]
return helpers.pick_source(sources)
else:
raise ResolverError('No streamer name or VOD ID found')
def get_url(self, host, media_id):
return self._default_get_url(host, media_id, 'https://{host}/{media_id}')
@classmethod
def _is_enabled(cls):
if usher is None:
return False
return super(cls, cls)._is_enabled()
def valid_url(self, url, host):
if usher is not None:
if re.search(self.pattern, url, re.I):
return not re.match(self.exclusion_pattern, url, re.I) or any(host in domain.lower() for domain in self.domains)
return False
@classmethod
def get_settings_xml(cls):
xml = super(cls, cls).get_settings_xml()
xml.append('<setting id="%s_client_id" type="text" label="%s" default="%s"/>' % (cls.__name__, i18n('client_id'), 'am6l6dn0x3bxrdgc557p1qeg1ma3bto'))
return xml
@staticmethod
def _extract_video(id_string):
video_id = None
idx = id_string.find('?')
if idx >= 0:
id_string = id_string[:idx]
idx = id_string.rfind('/')
if idx >= 0:
id_string = id_string[:idx] + id_string[idx + 1:]
idx = id_string.rfind('/')
if idx >= 0:
id_string = id_string[idx + 1:]
if id_string.startswith("videos"):
id_string = "v" + id_string[6:]
if id_string.startswith('v') or id_string.startswith('c') or id_string.startswith('a'):
video_id = id_string
return video_id
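    # Illustrative inputs/outputs for the extraction above (hypothetical ids):
    #   'somechannel/videos/123456'  ->  'v123456'
    #   'somechannel/v/123456'       ->  'v123456'
    #   'somechannel'                ->  None (live stream, no VOD id)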
|
anbangr/osci-project-config | refs/heads/develop | tools/ci2project.py | 11 | #! /usr/bin/env python
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import requests
done = False
last_change = {}
url = 'https://review.openstack.org/changes/'
params = {'q': '-age:1d', 'o': 'LABELS', 'n': '200'}
# This is what a change looks like
'''
{
"kind": "gerritcodereview#change",
"id": "openstack%2Ftripleo-image-elements~master~"
"Id520ea27f2803447eff654d14ba8cbb388502a52",
"project": "openstack/tripleo-image-elements",
"branch": "master",
"topic": "bug/1398951",
"change_id": "Id520ea27f2803447eff654d14ba8cbb388502a52",
"subject": "Change the kill_metadata executable strings in Neutron",
"status": "NEW",
"created": "2014-12-02 23:41:06.000000000",
"updated": "2014-12-02 23:41:09.698000000",
"mergeable": false,
"_sortkey": "003186ad00021d53",
"_number": 138579,
"owner": {
"name": "stephen-ma"
},
"labels": {
"Verified": {
"recommended": {
"name": "Jenkins"
},
"value": 1
},
"Code-Review": {},
"Workflow": {}
}
},
'''
while not done:
if last_change.get('_sortkey'):
params['N'] = last_change.get('_sortkey')
r = requests.get(url, params=params)
changes = json.loads(r.text[4:])
for change in changes:
if (not change.get('labels') or
not change.get('labels').get('Verified')):
continue
for key, value in change['labels']['Verified'].items():
if key == 'value':
continue
if key == 'blocking':
continue
if value['name'] == 'Jenkins':
continue
print "%s\t%s" % (change['project'], value['name'])
last_change = change
done = not last_change.get('_more_changes', False)
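# Example output (illustrative; fields are tab-separated, CI name hypothetical):
#   openstack/tripleo-image-elements    Some Vendor CI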
|
Barrog/C4-Datapack | refs/heads/master | data/jscript/quests/358_IllegitimateChildOfAGoddess/__init__.py | 1 | # Illegitimate Child Of A Goddess version 0.1
# by DrLecter
print "importing quests:",
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
#Quest info
QUEST_NUMBER,QUEST_NAME,QUEST_DESCRIPTION = 358,"IllegitimateChildOfAGoddess","Illegitimate Child Of A Goddess"
#Variables
DROP_RATE=12 #in %
REQUIRED=108 #how many items will be paid for a reward (affects onkill sounds too)
#Quest items
SN_SCALE = 5868
#Rewards
REWARDS=range(6329,6340,2)+range(5364,5367,2)
#Changing this value to non-zero, will turn recipes to 100% instead of 70/60%
ALT_RP_100 = 0
#Messages
default = "<html><head><body>I have nothing to say to you.</body></html>"
#NPCs
OLTLIN = 7862
#Mobs
MOBS = [ 672,673 ]
class Quest (JQuest) :
def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
def onEvent (self,event,st) :
htmltext = event
if event == "7862-5.htm" :
st.setState(STARTED)
st.set("cond","1")
st.playSound("ItemSound.quest_accept")
elif event == "7862-6.htm" :
st.exitQuest(1)
elif event == "7862-7.htm" :
if st.getQuestItemsCount(SN_SCALE) >= REQUIRED :
st.takeItems(SN_SCALE,REQUIRED)
item=REWARDS[st.getRandom(len(REWARDS))]
if ALT_RP_100: item +=1
st.giveItems(item ,1)
st.exitQuest(1)
st.playSound("ItemSound.quest_finish")
else :
htmltext = "7862-4.htm"
return htmltext
def onTalk (self,npc,st):
htmltext = default
id = st.getState()
if id == CREATED :
st.set("cond","0")
if st.getPlayer().getLevel() < 63 :
st.exitQuest(1)
htmltext = "7862-1.htm"
else :
htmltext = "7862-2.htm"
elif id == STARTED :
if st.getQuestItemsCount(SN_SCALE) >= REQUIRED :
htmltext = "7862-3.htm"
else :
htmltext = "7862-4.htm"
return htmltext
def onKill (self,npc,st) :
count = st.getQuestItemsCount(SN_SCALE)
if count < REQUIRED and st.getRandom(100) < DROP_RATE :
st.giveItems(SN_SCALE,1)
if count + 1 == REQUIRED :
st.playSound("ItemSound.quest_middle")
st.set("cond","2")
else :
st.playSound("ItemSound.quest_itemget")
return
# Quest class and state definition
QUEST = Quest(QUEST_NUMBER, str(QUEST_NUMBER)+"_"+QUEST_NAME, QUEST_DESCRIPTION)
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
# Quest NPC starter initialization
QUEST.addStartNpc(OLTLIN)
# Quest initialization
CREATED.addTalkId(OLTLIN)
STARTED.addTalkId(OLTLIN)
for i in MOBS :
STARTED.addKillId(i)
STARTED.addQuestDrop(i,SN_SCALE,1)
print str(QUEST_NUMBER)+": "+QUEST_DESCRIPTION
|
adambyram/bite-project-mod | refs/heads/master | tools/bugs/server/appengine/providers/services.py | 17 | # Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides access to services for the various providers.
TODO (jason.stredwick): Return to address the potential for bugs to have
multiple providers when addressing changes to the bug model.
Three main functions:
Crawl(provider)
Index(id)
Push(id)
"""
__author__ = ('[email protected] (Alexis O. Torres)',
'[email protected] (Jason Stredwick)')
from bugs.models.bugs import bug
from bugs.providers import config
from bugs.providers import crawler_base
from bugs.providers import indexer_base
from bugs.providers import pusher_base
from bugs.providers.provider import Provider
from bugs.providers.service import Service
class Error(Exception):
pass
class CrawlError(crawler_base.Error):
pass
class IndexError(indexer_base.Error):
pass
class InvalidIdError(Error):
pass
class ProviderNotSupportedError(Error):
pass
class PushError(pusher_base.Error):
pass
def Crawl(provider):
"""Crawl the given provider.
Args:
provider: The provider to crawl. (string)
Raises:
    ProviderNotSupportedError: The given provider is not supported.
"""
if not provider or provider not in config.PROVIDER_MAP:
    raise ProviderNotSupportedError('Invalid provider; %s' % provider)
crawler = config.PROVIDER_MAP[provider][Service.CRAWL]()
crawler.Crawl()
def Index(id):
"""Build index for the given provider.
Args:
id: The id for the bug the service will act on. (integer)
Raises:
InvalidIdError: Raised if the given id does not map to a bug.
    ProviderNotSupportedError: The given provider is not supported.
"""
try:
bug_model = bug.Get(id)
except bug.InvalidIdError, e:
raise InvalidIdError(e)
provider = bug_model.provider
if not provider or provider not in config.PROVIDER_MAP:
    raise ProviderNotSupportedError('Invalid provider; %s' % provider)
indexer = config.PROVIDER_MAP[provider][Service.INDEX]()
try:
indexer.Index(bug_model)
except indexer_base.Error, e:
raise InvalidIdError(e)
def Push(id):
"""Pushes the bug specified by the given id per the provided pusher.
Args:
id: The id for the bug the service will act on. (integer)
Raises:
InvalidIdError: Raised if the given id does not map to a bug.
    ProviderNotSupportedError: The given provider is not supported.
"""
try:
bug_model = bug.Get(id)
except bug.InvalidIdError, e:
raise InvalidIdError(e)
provider = bug_model.provider
if not provider or provider not in config.PROVIDER_MAP:
raise ProviderNotSupported('Invalid provider; %s' % provider)
pusher = config.PROVIDER_MAP[provider][Service.PUSH](bug_model)
pusher.Push()
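# Illustrative call sequence (hypothetical provider name and bug id; real
# provider keys come from bugs.providers.config.PROVIDER_MAP):
#   services.Crawl('issuetracker')  # crawl the provider's bug store
#   services.Index(42)              # index the crawled bug with id 42
#   services.Push(42)               # push bug 42 back to its provider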
|
gurneyalex/odoo | refs/heads/13.0-improve_sale_coupon_perf | addons/l10n_at/__manifest__.py | 4 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2015 WT-IO-IT GmbH (https://www.wt-io-it.at)
# Mag. Wolfgang Taferner <[email protected]>
# List of contributors:
# Mag. Wolfgang Taferner <[email protected]>
# Josse Colpaert <[email protected]>
{
"name": "Austria - Accounting",
"version": "3.0",
"author": "WT-IO-IT GmbH, Wolfgang Taferner",
"website": "https://www.wt-io-it.at",
"category": "Localization",
'summary': "Austrian Standardized Charts & Tax",
"description": """
Austrian charts of accounts (Einheitskontenrahmen 2010).
==========================================================
* Defines the following chart of account templates:
* Austrian General Chart of accounts 2010
* Defines templates for VAT on sales and purchases
* Defines tax templates
* Defines fiscal positions for Austrian fiscal legislation
* Defines tax reports U1/U30
""",
"depends": [
"account",
"base_iban",
"base_vat",
],
"data": [
'data/res.country.state.csv',
'data/account_account_tag.xml',
'data/account_account_template.xml',
'data/account_chart_template.xml',
'data/account_tax_report_data.xml',
'data/account_tax_template.xml',
'data/account_fiscal_position_template.xml',
'data/account_chart_template_configure_data.xml',
],
}
|
sgmap/openfisca-france | refs/heads/master | openfisca_france/model/revenus/remplacement/rente_accident_travail.py | 1 | # -*- coding: utf-8 -*-
from openfisca_france.model.base import *
class rente_accident_travail(Variable):
value_type = float
entity = Individu
label = u"Montant mensuel de la rente d’accident du travail"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do?cidTexte=LEGITEXT000006073189&idArticle=LEGIARTI000006743072&dateTexte=&categorieLien=cid"
definition_period = MONTH
def formula(individu, period):
previous_year = period.start.period('year').offset(-1)
non_salarie_agricole = individu('tns_benefice_exploitant_agricole', previous_year, options=[ADD]) != 0
rente_accident_travail_salarie = individu('rente_accident_travail_salarie', period)
rente_accident_travail_exploitant_agricole = individu('rente_accident_travail_exploitant_agricole', period)
return where(non_salarie_agricole, rente_accident_travail_exploitant_agricole, rente_accident_travail_salarie)
class rente_accident_travail_salarie(Variable):
value_type = float
entity = Individu
label = u"Montant de la rente d’accident du travail pour les victimes salariées"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do?cidTexte=LEGITEXT000006073189&idArticle=LEGIARTI000006743072&dateTexte=&categorieLien=cid"
definition_period = MONTH
def formula(individu, period):
previous_year = period.start.period('year').offset(-1)
salarie = individu('salaire_net', previous_year, options=[ADD]) != 0
rente_accident_travail_rachat = individu('rente_accident_travail_rachat', period)
taux_incapacite = individu('taux_accident_travail', period)
rente_accident_travail_base = individu('rente_accident_travail_base', period) * salarie
rente_accident_travail_apres_rachat = individu('rente_accident_travail_apres_rachat', period)
montant_rente_accident_travail = where(rente_accident_travail_rachat != 0, rente_accident_travail_apres_rachat,
rente_accident_travail_base)
return select(
[taux_incapacite < 0.1, taux_incapacite >= 0.1],
[0, montant_rente_accident_travail / 12]
)
class rente_accident_travail_exploitant_agricole(Variable):
value_type = float
entity = Individu
label = u"Montant de la rente d’accident du travail pour les chefs d'exploitation ou d'entreprise agricole"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do?cidTexte=LEGITEXT000006071367&idArticle=LEGIARTI000006598097&dateTexte=&categorieLien=cid"
definition_period = MONTH
def formula(individu, period):
previous_year = period.start.period('year').offset(-1)
non_salarie_agricole = individu('tns_benefice_exploitant_agricole', previous_year, options=[ADD]) != 0
rente_accident_travail_rachat = individu('rente_accident_travail_rachat', period)
taux_incapacite = individu('taux_accident_travail', period)
rente_accident_travail_base = individu('rente_accident_travail_base', period) * non_salarie_agricole
rente_accident_travail_apres_rachat = individu('rente_accident_travail_apres_rachat', period)
montant_rente_accident_travail = where(rente_accident_travail_rachat != 0, rente_accident_travail_apres_rachat,
rente_accident_travail_base)
return select(
[taux_incapacite < 0.3, taux_incapacite >= 0.3],
[0, montant_rente_accident_travail / 12]
)
class indemnite_accident_travail(Variable):
value_type = float
entity = Individu
label = u"Indemnité selon le taux d'incapacité"
reference = u"https://www.legifrance.gouv.fr/affichCode.do?idSectionTA=LEGISCTA000006172216&cidTexte=LEGITEXT000006073189"
definition_period = MONTH
def formula(individu, period, parameters):
indem_at = parameters(period).accident_travail.rente.taux
taux_incapacite = individu('taux_accident_travail', period)
return indem_at.indemnite_accident_travail.baremes.calc(taux_incapacite * 100)
class rente_accident_travail_base(Variable):
value_type = float
entity = Individu
label = u"Montant de base de la rente d’accident du travail"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do?cidTexte=LEGITEXT000006073189&idArticle=LEGIARTI000006743072&dateTexte=&categorieLien=cid"
definition_period = MONTH
def formula(individu, period, parameters):
param_rente_at = parameters(period).accident_travail.rente.taux
taux_incapacite = individu('taux_accident_travail', period)
taux = param_rente_at.bareme.calc(taux_incapacite)
taux_rente_accident_travail = select([taux_incapacite < param_rente_at.taux_minimum], [0], default=taux)
rente_accident_travail_base = individu('rente_accident_travail_salaire_utile', period) * taux_rente_accident_travail
return rente_accident_travail_base
class demande_rachat(Variable):
value_type = bool
entity = Individu
label = u"La victime a demandé le rachat partiel de la rente"
definition_period = MONTH
class rente_accident_travail_apres_rachat(Variable):
value_type = float
entity = Individu
label = u"Rente d’accident du travail, reliquat suite à conversion en capital"
definition_period = MONTH
def formula(individu, period, parameters):
rente_at = parameters(period).accident_travail.rente.taux
age = max_(individu('age', period), 16)
rente_accident_travail_rachat = individu('rente_accident_travail_rachat', period)
conversion_rente_capital = rente_at.capital_representatif[age]
rente_accident_travail_base = individu('rente_accident_travail_base', period)
rente_apres_rachat = rente_accident_travail_base - (rente_accident_travail_rachat / conversion_rente_capital)
return rente_apres_rachat
class rente_accident_travail_rachat(Variable):
value_type = float
entity = Individu
label = u"Rachat de la rente d’accident du travail"
reference = u"https://www.legifrance.gouv.fr/eli/arrete/2016/12/19/AFSS1637858A/jo/texte"
definition_period = MONTH
def formula(individu, period, parameters):
rente_at = parameters(period).accident_travail.rente.taux
demande_rachat = individu('demande_rachat', period)
age = max_(individu('age', period), 16)
conversion_rente_capital = rente_at.capital_representatif[age]
rente_accident_travail_base = individu('rente_accident_travail_base', period)
rachat = (rente_accident_travail_base * conversion_rente_capital) / 4
return rachat * demande_rachat
class pcrtp_nombre_actes_assistance(Variable):
value_type = int
entity = Individu
label = u"Nombre d'actes nécessitant l'assistance d'une tierce personne"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do;jsessionid=6D8F7F6917ADFBDEAFE1D8A432F39229.tplgfr23s_2?idArticle=LEGIARTI000027267037&cidTexte=LEGITEXT000006073189&dateTexte=20181218"
definition_period = MONTH
class pcrtp(Variable):
value_type = float
entity = Individu
label = u"Prestation complémentaire pour recours à tierce personne (PCRTP)"
reference = u"https://www.legifrance.gouv.fr/affichCode.do?idSectionTA=LEGISCTA000006172216&cidTexte=LEGITEXT000006073189"
definition_period = MONTH
def formula(individu, period, parameters):
rente_at = parameters(period).accident_travail.rente.taux
taux_incapacite = individu('taux_accident_travail', period)
pcrtp_nombre_actes_assistance = individu('pcrtp_nombre_actes_assistance', period)
montant_pcrtp = rente_at.pcrtp[pcrtp_nombre_actes_assistance]
return montant_pcrtp * (taux_incapacite >= 0.8)
class rente_accident_travail_salaire_utile(Variable):
value_type = float
entity = Individu
label = u"Salaire utile pour calculer la rente d’accident du travail"
reference = u"https://www.legifrance.gouv.fr/affichCodeArticle.do;jsessionid=7392B9902E4B974EAE8783FAF2D69849.tplgfr30s_1?idArticle=LEGIARTI000006750376&cidTexte=LEGITEXT000006073189&dateTexte=20180823"
definition_period = MONTH
def formula(individu, period, parameters):
previous_year = period.start.period('year').offset(-1)
rente_at = parameters(period).accident_travail.rente
salaire_net = individu('salaire_net', previous_year, options=[ADD])
tns_benefice_exploitant_agricole = individu('tns_benefice_exploitant_agricole', previous_year, options=[ADD])
salaire = max_(salaire_net, tns_benefice_exploitant_agricole)
salaire_net_base = max_(rente_at.salaire_net.salaire_minimum, salaire)
coef = salaire_net_base / rente_at.salaire_net.salaire_minimum
bareme = rente_at.salaire_net.bareme.calc(coef)
return rente_at.salaire_net.salaire_minimum * bareme
|
Bunkerbewohner/azurepython3 | refs/heads/master | azurepython3/auth.py | 1 | import base64
import hashlib
import hmac
from urllib.parse import urlparse
import itertools
import requests
class SharedKeyAuthentication:
def __init__(self, account_name, account_key):
"""
Initializes the authenticator using credentials provided
"""
self.account_name = account_name
self.account_key = account_key
def auth_header(self, request : requests.Request, content_length = None):
""" Computes the value of the Authorization header, following the form "SharedKey accountname:signature" """
signature = self._signature(request, content_length)
return 'SharedKey %s:%s' % (self.account_name, self._sign(signature))
def authenticate(self, request : requests.Request, content_length = None):
""" Computes and adds the Authorization header to request """
request.headers['Authorization'] = self.auth_header(request, content_length)
def _signature(self, request : requests.Request, content_length = None):
"""
Creates the signature string for this request according to
http://msdn.microsoft.com/en-us/library/windowsazure/dd179428.aspx
"""
headers = {str(name).lower(): value for name, value in request.headers.items() if not value is None}
        if content_length is not None and content_length > 0:
headers['content-length'] = str(content_length)
# method to sign
signature = request.method.upper() + '\n'
# get headers to sign
headers_to_sign = ['content-encoding', 'content-language', 'content-length',
'content-md5', 'content-type', 'date', 'if-modified-since',
'if-match', 'if-none-match', 'if-unmodified-since', 'range']
signature += "\n".join(headers.get(h, '') for h in headers_to_sign) + "\n"
# get x-ms header to sign
signature += ''.join("%s:%s\n" % (k, v) for k, v in sorted(headers.items()) if v and 'x-ms' in k)
# get account_name and uri path to sign
signature += '/' + self.account_name + urlparse(request.url).path
# get query string to sign
signature += ''.join("\n%s:%s" % (k, v) for k, v in sorted(request.params.items()) if v)
return signature
def _sign(self, string):
" Signs given string using SHA256 with the account key. Returns the base64 encoded signature. "
decode_account_key = base64.b64decode(self.account_key)
signed_hmac_sha256 = hmac.HMAC(decode_account_key, string.encode('utf-8'), hashlib.sha256)
digest = signed_hmac_sha256.digest()
        return base64.b64encode(digest).decode('utf-8')
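# Usage sketch (illustrative, not part of the original module): sign a GET
# request against the Blob service. The account name/key are placeholders and
# the x-ms-date header would normally be the current UTC time.
if __name__ == '__main__':
    auth = SharedKeyAuthentication(
        'myaccount', base64.b64encode(b'not-a-real-key').decode('utf-8'))
    req = requests.Request(
        'GET', 'https://myaccount.blob.core.windows.net/mycontainer',
        params={'restype': 'container'},
        headers={'x-ms-date': 'Mon, 01 Jan 2018 00:00:00 GMT',
                 'x-ms-version': '2014-02-14'})
    auth.authenticate(req)
    # The Authorization header now carries "SharedKey myaccount:<signature>"
    print(req.headers['Authorization'])
|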
EricYangzhiHong/python-docs-samples | refs/heads/master | datastore/ndb/transactions/appengine_config.py | 129 | """
`appengine_config.py` is automatically loaded when Google App Engine
starts a new instance of your application. This runs before any
WSGI applications specified in app.yaml are loaded.
"""
from google.appengine.ext import vendor
# Third-party libraries are stored in "lib", vendoring will make
# sure that they are importable by the application.
vendor.add('lib')
|
flgiordano/netcash | refs/heads/master | +/google-cloud-sdk/lib/surface/test/android/locales/list.py | 1 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The 'gcloud test android locales list' command."""
from googlecloudsdk.api_lib.test import util
from googlecloudsdk.calliope import base
class List(base.ListCommand):
"""List all Android locales available for testing internationalized apps."""
# TODO(user): add command examples with --filter when it is available
@staticmethod
def Args(parser):
"""Method called by Calliope to register flags for this command.
Args:
parser: An argparse parser used to add arguments that follow this
command in the CLI. Positional arguments are allowed.
"""
pass
def Run(self, args):
"""Run the 'gcloud test android locales list' command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation (i.e. group and command arguments combined).
Returns:
The list of Android locales we want to have printed later.
"""
catalog = util.GetAndroidCatalog(self.context)
return catalog.runtimeConfiguration.locales
def Collection(self, unused_args):
"""Choose the default resource collection key used to list Android locales.
Returns:
A collection string used as a key to select the default ResourceInfo
from core.resources.resource_registry.RESOURCE_REGISTRY.
"""
return 'test.android.locales'
|
baloan/mt-krpc | refs/heads/master | krpc/missions/__init__.py | 12133432 | |
DemocracyClub/Website | refs/heads/master | democracy_club/apps/report_2019_general_election/__init__.py | 12133432 | |
berinhard/newfies-dialer | refs/heads/master | newfies/dialer_campaign/templatetags/__init__.py | 12133432 | |
ybbaigo/deeptext | refs/heads/master | deeptext/models/__init__.py | 12133432 | |
okfish/django-oscar | refs/heads/master | src/oscar/apps/promotions/migrations/__init__.py | 12133432 | |
bikash/omg-monitor | refs/heads/master | monitor/streams/__init__.py | 12133432 | |
stephen144/odoo | refs/heads/9.0 | addons/website_sale_digital/controllers/__init__.py | 78 | # -*- encoding: utf-8 -*-
import main
|
vortex-ape/scikit-learn | refs/heads/master | examples/model_selection/plot_learning_curve.py | 5 | """
========================
Plotting Learning Curves
========================
On the left side the learning curve of a naive Bayes classifier is shown for
the digits dataset. Note that the training score and the cross-validation score
are both not very good at the end. However, the shape of the curve can be found
in more complex datasets very often: the training score is very high at the
beginning and decreases and the cross-validation score is very low at the
beginning and increases. On the right side we see the learning curve of an SVM
with RBF kernel. We can see clearly that the training score is still around
the maximum and the validation score could be increased with more training
samples.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.datasets import load_digits
from sklearn.model_selection import learning_curve
from sklearn.model_selection import ShuffleSplit
def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
n_jobs=None, train_sizes=np.linspace(.1, 1.0, 5)):
"""
Generate a simple plot of the test and training learning curve.
Parameters
----------
estimator : object type that implements the "fit" and "predict" methods
An object of that type which is cloned for each validation.
title : string
Title for the chart.
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
ylim : tuple, shape (ymin, ymax), optional
Defines minimum and maximum yvalues plotted.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`StratifiedKFold` used. If the estimator is not a classifier
or if ``y`` is neither binary nor multiclass, :class:`KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validators that can be used here.
n_jobs : int or None, optional (default=None)
Number of jobs to run in parallel.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
train_sizes : array-like, shape (n_ticks,), dtype float or int
Relative or absolute numbers of training examples that will be used to
generate the learning curve. If the dtype is float, it is regarded as a
fraction of the maximum size of the training set (that is determined
by the selected validation method), i.e. it has to be within (0, 1].
Otherwise it is interpreted as absolute sizes of the training sets.
        Note that for classification the number of samples usually has to
be big enough to contain at least one sample from each class.
(default: np.linspace(0.1, 1.0, 5))
"""
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel("Training examples")
plt.ylabel("Score")
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes)
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
plt.grid()
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
test_scores_mean + test_scores_std, alpha=0.1, color="g")
plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
label="Training score")
plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
label="Cross-validation score")
plt.legend(loc="best")
return plt
digits = load_digits()
X, y = digits.data, digits.target
title = "Learning Curves (Naive Bayes)"
# Cross validation with 100 iterations to get smoother mean test and train
# score curves, each time with 20% data randomly selected as a validation set.
cv = ShuffleSplit(n_splits=100, test_size=0.2, random_state=0)
estimator = GaussianNB()
plot_learning_curve(estimator, title, X, y, ylim=(0.7, 1.01), cv=cv, n_jobs=4)
title = "Learning Curves (SVM, RBF kernel, $\gamma=0.001$)"
# SVC is more expensive so we do a lower number of CV iterations:
cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)
estimator = SVC(gamma=0.001)
plot_learning_curve(estimator, title, X, y, (0.7, 1.01), cv=cv, n_jobs=4)
plt.show()
|
nanshihui/PocCollect | refs/heads/master | component/bash/t.py | 22 | #!/usr/bin/env python
# encoding: utf-8
class T(object):
def __init__(self):
self.result = {
'type': None,
'version': None,
}
self.keywords = []
self.versions = []
def match_rule(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo='', **kw):
##
        # head: response headers returned by the probe
        # context: response body (HTML) returned by the probe
        # ip: target IP address
        # port: target port
        # productname: component/product identified for the target
        # keywords: currently known component keywords
        # hackinfo: reserved field for extra data
return True
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
result = {}
        result['result'] = False
return result
def attack(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
result = {}
        result['result'] = False
return result
def parse_output(self, result):
result = {}
        result['result'] = False
        return result
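# Usage sketch (illustrative): how a scanner might drive this plugin stub.
# The IP/port values are placeholders from the documentation range.
if __name__ == '__main__':
    poc = T()
    if poc.match_rule(head='HTTP/1.1 200 OK', context='<html></html>',
                      ip='192.0.2.10', port='80'):
        print(poc.verify(ip='192.0.2.10', port='80'))
|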
Catch-up-TV-and-More/plugin.video.catchuptvandmore | refs/heads/master | resources/lib/channels/wo/qvc.py | 1 | # -*- coding: utf-8 -*-
# Copyright: (c) 2018, SylvainCecchetto
# GNU General Public License v2.0+ (see LICENSE.txt or https://www.gnu.org/licenses/gpl-2.0.txt)
# This file is part of Catch-up TV & More
from __future__ import unicode_literals
import json
import re
from codequick import Resolver, Script
import urlquick
# Live
URL_LIVE_QVC_IT = 'https://www.qvc.%s/tv/live.html'
# language
URL_LIVE_QVC_JP = 'https://qvc.jp/content/shop-live-tv.html'
URL_LIVE_QVC_DE_UK_US = 'http://www.qvc%s/content/shop-live-tv.qvc.html'
# language
URL_STREAM_LIMELIGHT = 'http://production-ps.lvp.llnw.net/r/PlaylistService/media/%s/getMobilePlaylistByMediaId'
# MediaId
DESIRED_LANGUAGE = Script.setting['qvc.language']
@Resolver.register
def get_live_url(plugin, item_id, **kwargs):
final_language = kwargs.get('language', Script.setting['qvc.language'])
if final_language == 'IT':
resp = urlquick.get(URL_LIVE_QVC_IT % final_language.lower())
live_id = re.compile(r'data-media="(.*?)"').findall(resp.text)[0]
live_datas_json = urlquick.get(URL_STREAM_LIMELIGHT % live_id)
json_parser = json.loads(live_datas_json.text)
stream_url = ''
for live_datas in json_parser["mediaList"][0]["mobileUrls"]:
if live_datas["targetMediaPlatform"] == "HttpLiveStreaming":
stream_url = live_datas["mobileUrl"]
return stream_url
if final_language == 'JP':
resp = urlquick.get(URL_LIVE_QVC_JP)
resp.encoding = "shift_jis"
return 'https:' + re.compile(r'url\"\:\"(.*?)\"').findall(resp.text)[0]
if final_language == 'DE':
resp = urlquick.get(URL_LIVE_QVC_DE_UK_US % '.de')
live_datas_json = re.compile(r'oLiveStreams=(.*?)}},').findall(resp.text)[0] + '}}'
json_parser = json.loads(live_datas_json)
return 'http:' + json_parser["QVC"]["url"]
if final_language == 'UK':
resp = urlquick.get(URL_LIVE_QVC_DE_UK_US % 'uk.com')
live_datas_json = re.compile(r'oLiveStreams=(.*?)}},').findall(resp.text)[0] + '}}'
json_parser = json.loads(live_datas_json)
return 'http:' + json_parser["QVC"]["url"]
# Use US by default
resp = urlquick.get(URL_LIVE_QVC_DE_UK_US % '.com')
live_datas_json = re.compile(r'oLiveStreams=(.*?)}},').findall(resp.text)[0] + '}}'
json_parser = json.loads(live_datas_json)
return 'http:' + json_parser["QVC"]["url"]
|
njvack/masterfile | refs/heads/main | masterfile/scripts/pretty.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2020 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <[email protected]> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
""" Re-format the masterfile dictionary to contain every masterfile column
Usage: make_pretty_dictionary [options] <masterfile_path> <outfile>
This is useful for feeding into Excel and browsing or searching around.
Use '-' as outfile to write to stdout.
Options:
-v, --verbose Display debugging output
"""
from __future__ import absolute_import, unicode_literals
import sys
from itertools import chain
import pandas
import masterfile
from masterfile.masterfile import LINE_ENDING
import logging
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def column_to_tuples(colname, components):
column_parts = colname.split('_')
return [
(colname, '_'.join(dict_key)) for dict_key in zip(
components, column_parts)
]
def columns_to_tuples(columns, components):
return list(chain(*[column_to_tuples(c, components) for c in columns]))
def columns_to_index(columns, components):
return pandas.MultiIndex.from_tuples(
columns_to_tuples(columns, components))
def populate_pretty_df(df, mf):
d = mf.dictionary
for index_val, dict_entries in d.df.iterrows():
logger.debug('Working on {}'.format(index_val))
key = '_'.join(index_val)
for dict_col, value in dict_entries.iteritems():
try:
df.loc[pandas.IndexSlice[:, key], dict_col] = value
except KeyError:
                logger.warning(
'Dictionary contains unused entry {}'.format(index_val))
def allocate_pretty_df(mf):
ix = columns_to_index(mf.df.columns, mf.components)
cols = mf.dictionary.columns
return pandas.DataFrame(index=ix, columns=cols, dtype=object)
def write_pretty_dictionary(mf_path, output):
mf = masterfile.load(mf_path)
pretty_df = allocate_pretty_df(mf)
pretty_df = pretty_df[~pretty_df.index.duplicated(keep='first')]
original_index = pretty_df.index
pretty_df.sort_index(inplace=True)
populate_pretty_df(pretty_df, mf)
reindexed = pretty_df.reindex(index=original_index)
reindexed.to_csv(output, line_terminator=LINE_ENDING)
return 0
def main(args):
if args.verbose:
logger.setLevel(logging.DEBUG)
logger.debug(args)
output = args.out_file
if output == '-':
output = sys.stdout
return write_pretty_dictionary(args.masterfile_path, output)
if __name__ == '__main__':
    # NOTE: assumption -- the original entry point presumably parses the
    # docopt-style usage string above; a minimal argparse equivalent is used
    # here so the module runs standalone (argument names inferred from main()).
    import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('masterfile_path')
    parser.add_argument('out_file')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='display debugging output')
    sys.exit(main(parser.parse_args()))
|
gautam1858/tensorflow | refs/heads/master | tensorflow/contrib/boosted_trees/python/utils/losses.py | 6 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Losses for Gtflow Estimator and Batch Estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops.losses import losses
def per_example_squared_hinge_loss(labels, weights, predictions):
loss = losses.hinge_loss(labels=labels, logits=predictions, weights=weights)
return math_ops.square(loss), control_flow_ops.no_op()
def per_example_logistic_loss(labels, weights, predictions):
"""Logistic loss given labels, example weights and predictions.
Args:
labels: Rank 2 (N, 1) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
predictions: Rank 2 (N, 1) tensor of per-example predictions.
Returns:
loss: A Rank 2 (N, 1) tensor of per-example logistic loss.
update_op: An update operation to update the loss's internal state.
"""
labels = math_ops.to_float(labels)
unweighted_loss = nn.sigmoid_cross_entropy_with_logits(
labels=labels, logits=predictions)
return unweighted_loss * weights, control_flow_ops.no_op()
# MUST USE WITH HESSIAN REGULARIZATION,
# This loss can have zero hessian, so it must be used with l2 or min_node_weight
# regularization.
# An example config is
# learner_config.constraints.min_node_weight = 1 / num_examples_per_layer
# learner_config.regularization.l2 = 1.0 / num_examples_per_layer
# TODO(nponomareva): make it multidimensional so we can estimate several
# quantiles at once.
def per_example_quantile_regression_loss(labels, weights, predictions,
quantile):
"""Smoothed loss for quantile regression.
The standard quantile regression loss is quantile*(y-y') when y>y' and
(quantile-1)*(y-y') otherwise, y' is a prediction, y is a label. The impl
below is this loss but squared in the region where the loss value < 1.
Args:
labels: Rank 2 (N, D) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
predictions: Rank 2 (N, D) tensor of per-example predictions.
quantile: The quantile to use.
Returns:
loss: A Rank 2 (N, 1) tensor of per-example quantile loss.
update_op: An update operation to update the loss's internal state.
"""
labels = math_ops.to_float(labels)
error = labels - predictions
square_loss_right = array_ops.where(error * quantile < 1.0,
math_ops.square(quantile * error),
quantile * error)
square_loss_left = array_ops.where(error * (quantile - 1) < 1,
math_ops.square((quantile - 1) * error),
(quantile - 1) * error)
unweighted_loss = array_ops.where(error > 0, square_loss_right,
square_loss_left)
if weights is None:
return unweighted_loss, control_flow_ops.no_op()
else:
return unweighted_loss * weights, control_flow_ops.no_op()
# This is classical form of Maximum entropy loss, that is twice differentiable
# (sparse_softmax_cross_entropy which is what we go for is not twice
# differentiable).
def per_example_maxent_loss(labels, weights, logits, num_classes, eps=1e-15):
"""Maximum entropy loss for multiclass problems.
Maximum entropy is a generalization of logistic loss for the case when more
than 2 classes are present.
Args:
labels: Rank 2 (N, 1) or Rank 1 (N) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
logits: Rank 2 (N, K) tensor of per-example predictions, K - num of
classes.
num_classes: number of classes in classification task. Used to expand label
indices into one-hot encodings.
eps: tolerance, used as a minimum possible value.
Returns:
loss: A Rank 2 (N, 1) tensor of per-example maxent loss
update_op: An update operation to update the loss's internal state.
"""
labels = math_ops.to_int64(labels)
# If labels are of rank 1, make them rank 2.
labels_shape = labels.get_shape()
if len(labels_shape) != 2:
labels = array_ops.expand_dims(labels, 1)
# Labels are indices of classes, convert them to one hot encodings.
target_one_hot = array_ops.one_hot(indices=labels, depth=num_classes)
labels = math_ops.reduce_sum(input_tensor=target_one_hot, axis=[1])
labels = math_ops.to_float(labels)
# Calculate softmax probabilities for each class.
unnormalized_probs = math_ops.exp(logits)
normalizers = math_ops.reduce_sum(unnormalized_probs, 1, keepdims=True)
softmax_predictions = math_ops.divide(unnormalized_probs,
math_ops.add(normalizers, eps))
# Pull out the probabilities for real label.
probs_for_real_class = math_ops.reduce_sum(labels * softmax_predictions, 1)
# Add handling for values near 0 and 1.
zeros = array_ops.zeros_like(probs_for_real_class, dtype=logits.dtype) + eps
one_minus_eps = array_ops.ones_like(
probs_for_real_class, dtype=logits.dtype) - eps
# Take maximum(eps, pred)
cond = (probs_for_real_class >= eps)
probs_for_real_class = array_ops.where(cond, probs_for_real_class, zeros)
# Take minimum(1-eps, pred)
cond = (probs_for_real_class <= 1 - eps)
probs_for_real_class = array_ops.where(cond, probs_for_real_class,
one_minus_eps)
unweighted_loss = array_ops.expand_dims(-math_ops.log(probs_for_real_class),
1)
if weights is None:
return unweighted_loss, control_flow_ops.no_op()
else:
return unweighted_loss * weights, control_flow_ops.no_op()
def per_example_squared_loss(labels, weights, predictions):
"""Squared loss given labels, example weights and predictions.
Args:
labels: Rank 2 (N, D) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
predictions: Rank 2 (N, D) tensor of per-example predictions.
Returns:
loss: A Rank 2 (N, 1) tensor of per-example squared loss.
update_op: An update operation to update the loss's internal state.
"""
unweighted_loss = math_ops.reduce_sum(
math_ops.square(predictions - labels), 1, keepdims=True)
return unweighted_loss * weights, control_flow_ops.no_op()
def per_example_exp_loss(labels, weights, predictions, name=None, eps=0.1):
"""Trimmed exponential loss given labels, example weights and predictions.
Note that this is only for binary classification.
If logistic loss tries to make sure that the classifier is certain of its
predictions, exp loss says: "as long as it got it correct, even barely, i
don't care". Can be used on noisy data, or when you don't care about getting
the actual probabilities from the model, just the correct label.
  The loss returned is exp(-targets*modified_predictions), where
  modified_predictions are 1 if sigmoid is >= 0.5+eps (e.g. we predict the
  positive class), -1 if sigmoid < 0.5-eps (e.g. we predict the negative class)
  and ax+b in the interval (0.5-eps, 0.5+eps), where a = 1/eps, b = -1/(2eps).
Args:
labels: Rank 2 (N, D) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
predictions: Rank 2 (N, D) tensor of per-example predictions.
name: A name for the operation (optional).
eps: For the range (0.5-eps, 0.5+eps) we set the predictions to be ax+b.
Returns:
loss: A Rank 2 (N, 1) tensor of per-example exp loss
update_op: An update operation to update the loss's internal state.
"""
def exp_with_logits(name, eps, labels=None, logits=None):
"""Computes exponential loss given `logits`.
    The loss returned is exp(-targets*modified_predictions), where
    modified_predictions are 1 if sigmoid is >= 0.5+eps (e.g. we predict the
    positive class), -1 if sigmoid < 0.5-eps (e.g. we predict the negative
    class) and ax+b in the interval (0.5-eps, 0.5+eps), where a = 1/eps,
    b = -1/(2eps).
Args:
name: A name for the operation (optional).
eps: For the range (0.5-eps, 0.5+eps) we set the predictions to be ax+b.
labels: A `Tensor` of the same type and shape as `logits`.
logits: A `Tensor` of type `float32` or `float64`.
Returns:
A `Tensor` of the same shape as `logits` with the componentwise
exponential losses.
Raises:
ValueError: If `logits` and `labels` do not have the same shape.
"""
with ops.name_scope(name, "exp_loss", [logits, labels]) as name:
logits = ops.convert_to_tensor(logits, name="logits")
labels = ops.convert_to_tensor(labels, name="labels")
try:
labels.get_shape().merge_with(logits.get_shape())
except ValueError:
raise ValueError("logits and labels must have the same shape (%s vs %s)"
% (logits.get_shape(), labels.get_shape()))
# Default threshold to switch between classes
zeros = array_ops.zeros_like(logits, dtype=logits.dtype)
ones = array_ops.ones_like(logits, dtype=logits.dtype)
neg_ones = -array_ops.ones_like(logits, dtype=logits.dtype)
# Convert labels to 1 and -1
cond_labels = (labels > zeros)
labels_converted = array_ops.where(cond_labels, ones, neg_ones)
# Convert predictions to 1 and -1
# The loss we build is min(1, max(-1,ax+b))
# where a=1/eps, b=-1/2eps.
a = 1.0 / eps
b = -1.0 / 2 / eps
probs = math_ops.sigmoid(logits)
y = a * probs + b
# Build max(-1, ax+b)
cond = (y < -1)
max_res = array_ops.where(cond, neg_ones, y)
# Build min part
cond = (max_res > 1)
min_res = array_ops.where(cond, ones, max_res)
preds_converted = min_res
return math_ops.exp(-preds_converted * labels_converted)
labels = math_ops.to_float(labels)
unweighted_loss = exp_with_logits(
name=name, eps=eps, labels=labels, logits=predictions)
return unweighted_loss * weights, control_flow_ops.no_op()
def per_example_full_exp_loss(labels, weights, predictions, name=None):
"""Full exponential loss given labels, example weights and predictions.
Note that this is only for binary classification.
  The loss returned is exp(-targets*logits), where targets are converted to -1
and 1.
Args:
labels: Rank 2 (N, D) tensor of per-example labels.
weights: Rank 2 (N, 1) tensor of per-example weights.
predictions: Rank 2 (N, D) tensor of per-example predictions.
name: A name for the operation (optional).
Returns:
loss: A Rank 2 (N, 1) tensor of per-example exp loss
update_op: An update operation to update the loss's internal state.
"""
def full_exp_with_logits(name, labels=None, logits=None):
"""Computes exponential loss given `logits`.
Args:
name: A name for the operation (optional).
labels: A `Tensor` of the same type and shape as `logits`.
logits: A `Tensor` of type `float32` or `float64`.
Returns:
A `Tensor` of the same shape as `logits` with the componentwise
exponential losses.
Raises:
ValueError: If `logits` and `labels` do not have the same shape.
"""
with ops.name_scope(name, "exp_loss", [logits, labels]) as name:
logits = ops.convert_to_tensor(logits, name="logits")
labels = ops.convert_to_tensor(labels, name="labels")
try:
labels.get_shape().merge_with(logits.get_shape())
except ValueError:
raise ValueError("logits and labels must have the same shape (%s vs %s)"
% (logits.get_shape(), labels.get_shape()))
# Default threshold of 0 to switch between classes
zeros = array_ops.zeros_like(logits, dtype=logits.dtype)
ones = array_ops.ones_like(logits, dtype=logits.dtype)
neg_ones = -array_ops.ones_like(logits, dtype=logits.dtype)
# Convert labels to 1 and -1
cond_labels = (labels > zeros)
labels_converted = array_ops.where(cond_labels, ones, neg_ones)
return math_ops.exp(-1.0 * logits * labels_converted)
labels = math_ops.to_float(labels)
unweighted_loss = full_exp_with_logits(
name=name, labels=labels, logits=predictions)
return unweighted_loss * weights, control_flow_ops.no_op()
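# Usage sketch (illustrative, TF1 graph mode; not part of the original file):
# evaluate the logistic loss on a tiny batch using the (N, 1) shape
# convention documented above.
if __name__ == '__main__':
  from tensorflow.python.client import session
  from tensorflow.python.framework import constant_op
  example_labels = constant_op.constant([[1.0], [0.0], [1.0]])
  example_weights = constant_op.constant([[1.0], [1.0], [0.5]])
  example_logits = constant_op.constant([[2.0], [-1.0], [0.3]])
  example_loss, _ = per_example_logistic_loss(example_labels, example_weights,
                                              example_logits)
  with session.Session() as sess:
    print(sess.run(example_loss))  # one weighted loss value per example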
|
sander76/home-assistant | refs/heads/dev | homeassistant/components/dte_energy_bridge/sensor.py | 5 | """Support for monitoring energy usage using the DTE energy bridge."""
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_NAME, HTTP_OK
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_IP_ADDRESS = "ip"
CONF_VERSION = "version"
DEFAULT_NAME = "Current Energy Usage"
DEFAULT_VERSION = 1
ICON = "mdi:flash"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.All(
vol.Coerce(int), vol.Any(1, 2)
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the DTE energy bridge sensor."""
name = config[CONF_NAME]
ip_address = config[CONF_IP_ADDRESS]
version = config[CONF_VERSION]
add_entities([DteEnergyBridgeSensor(ip_address, name, version)], True)
class DteEnergyBridgeSensor(SensorEntity):
"""Implementation of the DTE Energy Bridge sensors."""
def __init__(self, ip_address, name, version):
"""Initialize the sensor."""
self._version = version
if self._version == 1:
self._url = f"http://{ip_address}/instantaneousdemand"
elif self._version == 2:
self._url = f"http://{ip_address}:8888/zigbee/se/instantaneousdemand"
self._name = name
self._unit_of_measurement = "kW"
self._state = None
@property
def name(self):
"""Return the name of th sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the energy usage data from the DTE energy bridge."""
try:
response = requests.get(self._url, timeout=5)
except (requests.exceptions.RequestException, ValueError):
_LOGGER.warning(
"Could not update status for DTE Energy Bridge (%s)", self._name
)
return
if response.status_code != HTTP_OK:
_LOGGER.warning(
"Invalid status_code from DTE Energy Bridge: %s (%s)",
response.status_code,
self._name,
)
return
response_split = response.text.split()
if len(response_split) != 2:
_LOGGER.warning(
'Invalid response from DTE Energy Bridge: "%s" (%s)',
response.text,
self._name,
)
return
val = float(response_split[0])
# A workaround for a bug in the DTE energy bridge.
# The returned value can randomly be in W or kW. Checking for a
# a decimal seems to be a reliable way to determine the units.
# Limiting to version 1 because version 2 apparently always returns
# values in the format 000000.000 kW, but the scaling is Watts
# NOT kWatts
if self._version == 1 and "." in response_split[0]:
self._state = val
else:
self._state = val / 1000
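# Standalone sketch (illustrative, no Home Assistant required): the W-vs-kW
# heuristic above, isolated for clarity. A version 1 bridge reading with a
# decimal point is already in kW; otherwise the value is scaled from W.
def _demo_to_kw(raw_value, version=1):
    val = float(raw_value)
    if version == 1 and '.' in raw_value:
        return val
    return val / 1000
# _demo_to_kw('.474') -> 0.474 and _demo_to_kw('474') -> 0.474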
|
xu-c/flasky | refs/heads/master | app/main/errors.py | 149 | from flask import render_template, request, jsonify
from . import main
@main.app_errorhandler(403)
def forbidden(e):
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'forbidden'})
response.status_code = 403
return response
return render_template('403.html'), 403
@main.app_errorhandler(404)
def page_not_found(e):
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'not found'})
response.status_code = 404
return response
return render_template('404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(e):
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'internal server error'})
response.status_code = 500
return response
return render_template('500.html'), 500
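# Usage sketch (illustrative): the JSON branches above can be exercised with
# Flask's test client; "app" stands for the application object created
# elsewhere in this project.
#
#     client = app.test_client()
#     resp = client.get('/nonexistent', headers={'Accept': 'application/json'})
#     # resp.status_code == 404, body == {"error": "not found"}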
|
bashu/django-uncharted | refs/heads/develop | uncharted/__init__.py | 279 | __version__ = '0.0.1'
|
livingbio/libsaas | refs/heads/master | libsaas/services/flurry/service.py | 4 | from libsaas.services import base
from .resource import Application, Applications
class Flurry(base.Resource):
"""
"""
def __init__(self, api_access_code):
"""
Create a Flurry service.
:var api_access_code: The API access code.
:vartype api_access_code: str
"""
self.apiroot = 'http://api.flurry.com'
self.api_access_code = api_access_code
self.add_filter(self.add_authorization)
self.add_filter(self.add_json)
def add_authorization(self, request):
request.params['apiAccessCode'] = self.api_access_code
def add_json(self, request):
request.headers['Content-Type'] = 'application/json'
def get_url(self):
return self.apiroot
@base.resource(Applications)
def applications(self):
"""
Return the resource corresponding to all applications.
"""
return Applications(self)
@base.resource(Application)
def application(self, application_api_key):
"""
Returns the resource corresponding to a single application.
"""
return Application(self, application_api_key)
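# Usage sketch (illustrative; the access code and API key are placeholders):
#
#     flurry = Flurry('MY_API_ACCESS_CODE')
#     all_apps = flurry.applications()
#     one_app = flurry.application('MY_APPLICATION_API_KEY')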
|
jjmleiro/hue | refs/heads/master | desktop/core/ext-py/requests-2.10.0/requests/packages/chardet/chardistribution.py | 2754 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3
class CharDistributionAnalysis:
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
self._mCharToFreqOrder = None
self._mTableSize = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
self._mTypicalDistributionRatio = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
self._mTotalChars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
self._mFreqChars = 0
def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
# we only care about 2-bytes character in our distribution analysis
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
# order is valid
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
* self._mTypicalDistributionRatio))
if r < SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
return SURE_YES
def got_enough_data(self):
# It is not necessary to receive all data to draw conclusion.
# For charset detection, certain amount of data is enough
return self._mTotalChars > ENOUGH_DATA_THRESHOLD
def get_order(self, aBuf):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
# table.
return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCTWCharToFreqOrder
self._mTableSize = EUCTW_TABLE_SIZE
self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-TW encoding, we are interested
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xC4:
return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCKRCharToFreqOrder
self._mTableSize = EUCKR_TABLE_SIZE
self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-KR encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xB0:
return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = GB2312CharToFreqOrder
self._mTableSize = GB2312_TABLE_SIZE
self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for GB2312 encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = Big5CharToFreqOrder
self._mTableSize = BIG5_TABLE_SIZE
self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for big5 encoding, we are interested
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
else:
return 157 * (first_char - 0xA4) + second_char - 0x40
else:
return -1
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for sjis encoding, we are interested
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
# second byte range: 0x40 -- 0x7e, 0x81 -- oxfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
order = 188 * (first_char - 0xE0 + 31)
else:
return -1
order = order + second_char - 0x40
if second_char > 0x7F:
order = -1
return order
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
# for euc-JP encoding, we are interested
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
char = wrap_ord(aBuf[0])
if char >= 0xA0:
return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
else:
return -1
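# Usage sketch (illustrative): feed two-byte EUC-KR characters to an analyser
# and read back a confidence estimate. Run from within the chardet package,
# since this module uses relative imports:
#
#     analyser = EUCKRDistributionAnalysis()
#     sample = b'\xb0\xa1' * 8  # eight copies of one two-byte character
#     for i in range(0, len(sample), 2):
#         analyser.feed(sample[i:i + 2], 2)
#     print(analyser.get_confidence())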
|
lundjordan/services | refs/heads/master | src/tooltool/api/tooltool_api/flask.py | 2 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import tooltool_api
app = tooltool_api.create_app()
|
nzavagli/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/django/conf/urls/i18n.py | 113 | import warnings
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.urlresolvers import LocaleRegexURLResolver
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.views.i18n import set_language
def i18n_patterns(prefix, *args):
"""
Adds the language code prefix to every URL pattern within this
function. This may only be used in the root URLconf, not in an included
URLconf.
"""
if isinstance(prefix, six.string_types):
warnings.warn(
"Calling i18n_patterns() with the `prefix` argument and with tuples "
"instead of django.conf.urls.url() instances is deprecated and "
"will no longer work in Django 2.0. Use a list of "
"django.conf.urls.url() instances instead.",
RemovedInDjango20Warning, stacklevel=2
)
pattern_list = patterns(prefix, *args)
else:
pattern_list = [prefix] + list(args)
if not settings.USE_I18N:
return pattern_list
return [LocaleRegexURLResolver(pattern_list)]
urlpatterns = [
url(r'^setlang/$', set_language, name='set_language'),
]
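# Usage sketch (illustrative ROOT_URLCONF; view names are placeholders):
#
#     from django.conf.urls import include, url
#     from django.conf.urls.i18n import i18n_patterns
#
#     urlpatterns = i18n_patterns(
#         url(r'^about/$', about_view, name='about'),
#         url(r'^news/', include('news.urls')),
#     )
#
# Requests are then served under a language prefix, e.g. /en/about/ or
# /fr/about/, with the active language switched via the set_language view
# registered above.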
|
ivanhorvath/openshift-tools | refs/heads/prod | openshift/installer/vendored/openshift-ansible-3.7.52-1/roles/lib_openshift/library/oc_volume.py | 3 | #!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/volume -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_volume
short_description: Create, modify, and idempotently manage openshift volumes.
description:
- Modify openshift volumes programmatically.
options:
state:
description:
- State controls the action that will be taken with resource
- 'present' will create or update and object to the desired state
- 'absent' will ensure volumes are removed
- 'list' will read the volumes
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
name:
description:
- Name of the object that is being queried.
required: false
default: None
aliases: []
vol_name:
description:
- Name of the volume that is being queried.
required: false
default: None
aliases: []
namespace:
description:
- The name of the namespace where the object lives
required: false
default: default
aliases: []
kind:
description:
- The kind of object that can be managed.
default: dc
choices:
- dc
- rc
- pods
aliases: []
mount_type:
description:
- The type of volume to be used
required: false
default: None
choices:
- emptydir
- hostpath
- secret
- pvc
- configmap
aliases: []
mount_path:
description:
- The path to where the mount will be attached
required: false
default: None
aliases: []
secret_name:
description:
- The name of the secret. Used when mount_type is secret.
required: false
default: None
aliases: []
claim_size:
description:
- The size in GB of the pv claim. e.g. 100G
required: false
default: None
aliases: []
claim_name:
description:
- The name of the pv claim
required: false
default: None
aliases: []
configmap_name:
description:
- The name of the configmap
required: false
default: None
aliases: []
author:
- "Kenny Woodson <[email protected]>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: attach storage volumes to deploymentconfig
oc_volume:
namespace: logging
kind: dc
name: name_of_the_dc
mount_type: pvc
claim_name: loggingclaim
claim_size: 100G
vol_name: logging-storage
run_once: true
'''
# -*- -*- -*- End included fragment: doc/volume -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z{}/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}}
key = a#b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
yfd.write(contents)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
        # copy.deepcopy does not preserve ruamel.yaml formatting metadata,
        # so round-trip dump/load when available and fall back to deepcopy
        # for plain PyYAML.
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
# When path equals "" it is a special case.
# "" refers to the root of the document
# Only update the root path (entire document) when its a list or dict
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
            # copy.deepcopy does not preserve ruamel.yaml formatting metadata,
            # so round-trip dump/load when available and fall back to deepcopy
            # for plain PyYAML.
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
            curr_value = yaml.safe_load(invalue)
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
        # Special case: yaml-loading '' would turn it into None, so leave it as-is
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
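    # Example (illustrative sketch): parse_value('yes', vtype='bool') passes
    # the boolean validation above and is yaml-loaded to True;
    # parse_value('42') becomes the int 42; parse_value('42', vtype='str')
    # stays the string '42'.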
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
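    # Example edits list consumed by process_edits (illustrative sketch):
    #   [{'key': 'spec.replicas', 'value': 3},
    #    {'key': 'spec.template.spec.containers[0].args', 'value': '--debug',
    #     'action': 'append'}]
    # Entries without an 'action' fall through to yamlfile.put().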
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                    'file exists, that it has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key'])
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
        # If we were passed a key and value, we encapsulate them in an
        # edits list and process them the same way as an explicit edits
        # parameter.
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# if there were changes and a src provided to us we need to write
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unknown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
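# Example (illustrative): with oc installed in /usr/local/bin but that
# directory missing from PATH (common under sudo/ansible), locate_oc_binary()
# still resolves '/usr/local/bin/oc' via ADDITIONAL_PATH_LOOKUPS.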
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty']:  # noqa: E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
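    # Example (illustrative): self.openshift_cmd(['get', 'pods', '-o', 'json'],
    # output=True) runs roughly "oc get pods -o json -n <namespace>" and
    # returns a dict carrying 'returncode', 'cmd' and the parsed 'results'.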
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(str(contents))
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
        with open(inc_file) as ifd:
            Utils._write(tmpfile, ifd.read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# horrible hack to get openshift version in Openshift 3.2
        # By default "oc version" in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
version = version[1:] # Remove the 'v' prefix
versions_dict[tech + '_numeric'] = version.split('+')[0]
# "3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))
return versions_dict
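    # Example (sketch): add_custom_versions({'oc': 'v3.3.0.33'}) returns
    # {'oc_numeric': '3.3.0.33', 'oc_short': '3.3'}.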
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import rpm
transaction_set = rpm.TransactionSet()
rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
return rpmquery.count() > 0
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] is not None or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
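    # Example (illustrative): with _options of
    #   {'replicas': {'value': 3, 'include': True}}
    # stringify() returns ['--replicas=3'].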
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/deploymentconfig.py -*- -*- -*-
# pylint: disable=too-many-public-methods
class DeploymentConfig(Yedit):
''' Class to model an openshift DeploymentConfig'''
default_deployment_config = '''
apiVersion: v1
kind: DeploymentConfig
metadata:
name: default_dc
namespace: default
spec:
replicas: 0
selector:
default_dc: default_dc
strategy:
resources: {}
rollingParams:
intervalSeconds: 1
maxSurge: 0
maxUnavailable: 25%
timeoutSeconds: 600
updatePercent: -25
updatePeriodSeconds: 1
type: Rolling
template:
metadata:
spec:
containers:
- env:
- name: default
value: default
image: default
imagePullPolicy: IfNotPresent
name: default_dc
ports:
- containerPort: 8000
hostPort: 8000
protocol: TCP
name: default_port
resources: {}
terminationMessagePath: /dev/termination-log
dnsPolicy: ClusterFirst
hostNetwork: true
nodeSelector:
type: compute
restartPolicy: Always
securityContext: {}
serviceAccount: default
serviceAccountName: default
terminationGracePeriodSeconds: 30
triggers:
- type: ConfigChange
'''
replicas_path = "spec.replicas"
env_path = "spec.template.spec.containers[0].env"
volumes_path = "spec.template.spec.volumes"
container_path = "spec.template.spec.containers"
volume_mounts_path = "spec.template.spec.containers[0].volumeMounts"
def __init__(self, content=None):
''' Constructor for deploymentconfig '''
if not content:
content = DeploymentConfig.default_deployment_config
super(DeploymentConfig, self).__init__(content=content)
def add_env_value(self, key, value):
''' add key, value pair to env array '''
rval = False
env = self.get_env_vars()
if env:
env.append({'name': key, 'value': value})
rval = True
else:
result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value})
rval = result[0]
return rval
def exists_env_value(self, key, value):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key and result['value'] == value:
return True
return False
def exists_env_key(self, key):
        ''' return whether a key exists in the env list '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key:
return True
return False
def get_env_var(self, key):
        '''return a single environment variable by name '''
results = self.get(DeploymentConfig.env_path) or []
if not results:
return None
for env_var in results:
if env_var['name'] == key:
return env_var
return None
def get_env_vars(self):
        '''return the list of environment variables '''
return self.get(DeploymentConfig.env_path) or []
def delete_env_var(self, keys):
'''delete a list of keys '''
if not isinstance(keys, list):
keys = [keys]
env_vars_array = self.get_env_vars()
modified = False
for key in keys:
idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
            if idx is not None:
modified = True
del env_vars_array[idx]
if modified:
return True
return False
def update_env_var(self, key, value):
'''place an env in the env var list'''
env_vars_array = self.get_env_vars()
idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
        if idx is not None:
env_vars_array[idx]['value'] = value
else:
self.add_env_value(key, value)
return True
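    # Example (sketch): dc.update_env_var('LOG_LEVEL', 'debug') rewrites the
    # value in place when the name already exists, otherwise appends a new
    # {'name': ..., 'value': ...} entry via add_env_value().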
def exists_volume_mount(self, volume_mount):
''' return whether a volume mount exists '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts:
return False
volume_mount_found = False
for exist_volume_mount in exist_volume_mounts:
if exist_volume_mount['name'] == volume_mount['name']:
volume_mount_found = True
break
return volume_mount_found
def exists_volume(self, volume):
''' return whether a volume exists '''
exist_volumes = self.get_volumes()
volume_found = False
for exist_volume in exist_volumes:
if exist_volume['name'] == volume['name']:
volume_found = True
break
return volume_found
def find_volume_by_name(self, volume, mounts=False):
        ''' return the volume (or volume mount) matching by name, or None '''
volumes = []
if mounts:
volumes = self.get_volume_mounts()
else:
volumes = self.get_volumes()
for exist_volume in volumes:
if exist_volume['name'] == volume['name']:
return exist_volume
return None
def get_replicas(self):
''' return replicas setting '''
return self.get(DeploymentConfig.replicas_path)
def get_volume_mounts(self):
'''return volume mount information '''
return self.get_volumes(mounts=True)
def get_volumes(self, mounts=False):
        '''return volume or volume mount information '''
if mounts:
return self.get(DeploymentConfig.volume_mounts_path) or []
return self.get(DeploymentConfig.volumes_path) or []
def delete_volume_by_name(self, volume):
'''delete a volume '''
modified = False
exist_volume_mounts = self.get_volume_mounts()
exist_volumes = self.get_volumes()
del_idx = None
for idx, exist_volume in enumerate(exist_volumes):
if 'name' in exist_volume and exist_volume['name'] == volume['name']:
del_idx = idx
break
        if del_idx is not None:
del exist_volumes[del_idx]
modified = True
del_idx = None
for idx, exist_volume_mount in enumerate(exist_volume_mounts):
if 'name' in exist_volume_mount and exist_volume_mount['name'] == volume['name']:
del_idx = idx
break
        if del_idx is not None:
            del exist_volume_mounts[del_idx]
modified = True
return modified
def add_volume_mount(self, volume_mount):
''' add a volume or volume mount to the proper location '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts and volume_mount:
self.put(DeploymentConfig.volume_mounts_path, [volume_mount])
else:
exist_volume_mounts.append(volume_mount)
def add_volume(self, volume):
''' add a volume or volume mount to the proper location '''
exist_volumes = self.get_volumes()
if not volume:
return
if not exist_volumes:
self.put(DeploymentConfig.volumes_path, [volume])
else:
exist_volumes.append(volume)
def update_replicas(self, replicas):
''' update replicas value '''
self.put(DeploymentConfig.replicas_path, replicas)
def update_volume(self, volume):
        '''place a volume in the volumes list'''
exist_volumes = self.get_volumes()
if not volume:
return False
# update the volume
update_idx = None
for idx, exist_vol in enumerate(exist_volumes):
if exist_vol['name'] == volume['name']:
update_idx = idx
break
        if update_idx is not None:
exist_volumes[update_idx] = volume
else:
self.add_volume(volume)
return True
def update_volume_mount(self, volume_mount):
        '''place a volume mount in the volume mounts list'''
modified = False
exist_volume_mounts = self.get_volume_mounts()
if not volume_mount:
return False
# update the volume mount
for exist_vol_mount in exist_volume_mounts:
if exist_vol_mount['name'] == volume_mount['name']:
if 'mountPath' in exist_vol_mount and \
str(exist_vol_mount['mountPath']) != str(volume_mount['mountPath']):
exist_vol_mount['mountPath'] = volume_mount['mountPath']
modified = True
break
if not modified:
self.add_volume_mount(volume_mount)
modified = True
return modified
def needs_update_volume(self, volume, volume_mount):
''' verify a volume update is needed '''
exist_volume = self.find_volume_by_name(volume)
exist_volume_mount = self.find_volume_by_name(volume, mounts=True)
results = []
results.append(exist_volume['name'] == volume['name'])
if 'secret' in volume:
results.append('secret' in exist_volume)
results.append(exist_volume['secret']['secretName'] == volume['secret']['secretName'])
results.append(exist_volume_mount['name'] == volume_mount['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif 'emptyDir' in volume:
results.append(exist_volume_mount['name'] == volume['name'])
results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
elif 'persistentVolumeClaim' in volume:
pvc = 'persistentVolumeClaim'
results.append(pvc in exist_volume)
if results[-1]:
results.append(exist_volume[pvc]['claimName'] == volume[pvc]['claimName'])
if 'claimSize' in volume[pvc]:
results.append(exist_volume[pvc]['claimSize'] == volume[pvc]['claimSize'])
elif 'hostpath' in volume:
results.append('hostPath' in exist_volume)
results.append(exist_volume['hostPath']['path'] == volume_mount['mountPath'])
return not all(results)
def needs_update_replicas(self, replicas):
''' verify whether a replica update is needed '''
current_reps = self.get(DeploymentConfig.replicas_path)
        return current_reps != replicas
# -*- -*- -*- End included fragment: lib/deploymentconfig.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/volume.py -*- -*- -*-
class Volume(object):
''' Class to represent an openshift volume object'''
volume_mounts_path = {"pod": "spec.containers[0].volumeMounts",
"dc": "spec.template.spec.containers[0].volumeMounts",
"rc": "spec.template.spec.containers[0].volumeMounts",
}
volumes_path = {"pod": "spec.volumes",
"dc": "spec.template.spec.volumes",
"rc": "spec.template.spec.volumes",
}
@staticmethod
def create_volume_structure(volume_info):
''' return a properly structured volume '''
volume_mount = None
volume = {'name': volume_info['name']}
volume_type = volume_info['type'].lower()
if volume_type == 'secret':
            volume['secret'] = {'secretName': volume_info['secret_name']}
volume_mount = {'mountPath': volume_info['path'],
'name': volume_info['name']}
elif volume_type == 'emptydir':
volume['emptyDir'] = {}
volume_mount = {'mountPath': volume_info['path'],
'name': volume_info['name']}
elif volume_type == 'pvc' or volume_type == 'persistentvolumeclaim':
volume['persistentVolumeClaim'] = {}
volume['persistentVolumeClaim']['claimName'] = volume_info['claimName']
volume['persistentVolumeClaim']['claimSize'] = volume_info['claimSize']
elif volume_type == 'hostpath':
volume['hostPath'] = {}
volume['hostPath']['path'] = volume_info['path']
elif volume_type == 'configmap':
volume['configMap'] = {}
volume['configMap']['name'] = volume_info['configmap_name']
volume_mount = {'mountPath': volume_info['path'],
'name': volume_info['name']}
return (volume, volume_mount)
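    # Example (sketch): a pvc request such as
    #   {'name': 'data', 'type': 'pvc', 'claimName': 'data-pvc', 'claimSize': '1G'}
    # yields ({'name': 'data', 'persistentVolumeClaim': {'claimName': 'data-pvc',
    # 'claimSize': '1G'}}, None); pvc and hostpath types produce no volume_mount.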
# -*- -*- -*- End included fragment: lib/volume.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_volume.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCVolume(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
volume_mounts_path = {"pod": "spec.containers[0].volumeMounts",
"dc": "spec.template.spec.containers[0].volumeMounts",
"rc": "spec.template.spec.containers[0].volumeMounts",
}
volumes_path = {"pod": "spec.volumes",
"dc": "spec.template.spec.volumes",
"rc": "spec.template.spec.volumes",
}
# pylint allows 5
# pylint: disable=too-many-arguments
def __init__(self,
kind,
resource_name,
namespace,
vol_name,
mount_path,
mount_type,
secret_name,
claim_size,
claim_name,
configmap_name,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False):
''' Constructor for OCVolume '''
super(OCVolume, self).__init__(namespace, kubeconfig)
self.kind = kind
self.volume_info = {'name': vol_name,
'secret_name': secret_name,
'path': mount_path,
'type': mount_type,
'claimSize': claim_size,
'claimName': claim_name,
'configmap_name': configmap_name}
self.volume, self.volume_mount = Volume.create_volume_structure(self.volume_info)
self.name = resource_name
self.namespace = namespace
self.kubeconfig = kubeconfig
self.verbose = verbose
self._resource = None
@property
def resource(self):
''' property function for resource var '''
if not self._resource:
self.get()
return self._resource
@resource.setter
def resource(self, data):
''' setter function for resource var '''
self._resource = data
def exists(self):
''' return whether a volume exists '''
volume_mount_found = False
volume_found = self.resource.exists_volume(self.volume)
if not self.volume_mount and volume_found:
return True
if self.volume_mount:
volume_mount_found = self.resource.exists_volume_mount(self.volume_mount)
if volume_found and self.volume_mount and volume_mount_found:
return True
return False
def get(self):
'''return volume information '''
vol = self._get(self.kind, self.name)
if vol['returncode'] == 0:
if self.kind == 'dc':
self.resource = DeploymentConfig(content=vol['results'][0])
vol['results'] = self.resource.get_volumes()
return vol
def delete(self):
'''remove a volume'''
self.resource.delete_volume_by_name(self.volume)
return self._replace_content(self.kind, self.name, self.resource.yaml_dict)
def put(self):
'''place volume into dc '''
self.resource.update_volume(self.volume)
self.resource.get_volumes()
self.resource.update_volume_mount(self.volume_mount)
return self._replace_content(self.kind, self.name, self.resource.yaml_dict)
def needs_update(self):
''' verify an update is needed '''
return self.resource.needs_update_volume(self.volume, self.volume_mount)
# pylint: disable=too-many-branches,too-many-return-statements
@staticmethod
def run_ansible(params, check_mode=False):
'''run the idempotent ansible code'''
oc_volume = OCVolume(params['kind'],
params['name'],
params['namespace'],
params['vol_name'],
params['mount_path'],
params['mount_type'],
# secrets
params['secret_name'],
# pvc
params['claim_size'],
params['claim_name'],
# configmap
params['configmap_name'],
kubeconfig=params['kubeconfig'],
verbose=params['debug'])
state = params['state']
api_rval = oc_volume.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
#####
# Get
#####
if state == 'list':
return {'changed': False, 'results': api_rval['results'], 'state': state}
########
# Delete
########
if state == 'absent':
if oc_volume.exists():
if check_mode:
return {'changed': False, 'msg': 'CHECK_MODE: Would have performed a delete.'}
api_rval = oc_volume.delete()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
return {'changed': False, 'state': state}
if state == 'present':
########
# Create
########
if not oc_volume.exists():
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'}
# Create it here
api_rval = oc_volume.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_volume.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
########
# Update
########
if oc_volume.needs_update():
api_rval = oc_volume.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_volume.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
                return {'changed': True, 'results': api_rval, 'state': state}
            return {'changed': False, 'results': api_rval, 'state': state}
return {'failed': True, 'msg': 'Unknown state passed. {}'.format(state)}
# -*- -*- -*- End included fragment: class/oc_volume.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_volume.py -*- -*- -*-
def main():
'''
ansible oc module for volumes
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
kind=dict(default='dc', choices=['dc', 'rc', 'pods'], type='str'),
namespace=dict(default='default', type='str'),
vol_name=dict(default=None, type='str'),
name=dict(default=None, type='str'),
mount_type=dict(default=None,
choices=['emptydir', 'hostpath', 'secret', 'pvc', 'configmap'],
type='str'),
mount_path=dict(default=None, type='str'),
# secrets require a name
secret_name=dict(default=None, type='str'),
# pvc requires a size
claim_size=dict(default=None, type='str'),
claim_name=dict(default=None, type='str'),
# configmap requires a name
configmap_name=dict(default=None, type='str'),
),
supports_check_mode=True,
)
rval = OCVolume.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
module.fail_json(**rval)
module.exit_json(**rval)
if __name__ == '__main__':
main()
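# Example task using this module (hedged sketch; parameter names taken from
# the argument_spec above):
#   - oc_volume:
#       state: present
#       kind: dc
#       name: router
#       namespace: default
#       vol_name: data
#       mount_type: pvc
#       claim_name: data-pvc
#       claim_size: 1G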
# -*- -*- -*- End included fragment: ansible/oc_volume.py -*- -*- -*-
|
martijnthe/qemu_stm32 | refs/heads/stm32-v1.4 | scripts/qapi-commands.py | 27 | #
# QAPI command marshaller generator
#
# Copyright IBM, Corp. 2011
#
# Authors:
# Anthony Liguori <[email protected]>
# Michael Roth <[email protected]>
#
# This work is licensed under the terms of the GNU GPLv2.
# See the COPYING.LIB file in the top-level directory.
from ordereddict import OrderedDict
from qapi import *
import sys
import os
import getopt
import errno
def type_visitor(name):
if type(name) == list:
return 'visit_type_%sList' % name[0]
else:
return 'visit_type_%s' % name
def generate_decl_enum(name, members, genlist=True):
return mcgen('''
void %(visitor)s(Visitor *m, %(name)s * obj, const char *name, Error **errp);
''',
                 name=name, visitor=type_visitor(name))
def generate_command_decl(name, args, ret_type):
arglist=""
for argname, argtype, optional, structured in parse_args(args):
argtype = c_type(argtype)
if argtype == "char *":
argtype = "const char *"
if optional:
arglist += "bool has_%s, " % c_var(argname)
arglist += "%s %s, " % (argtype, c_var(argname))
return mcgen('''
%(ret_type)s qmp_%(name)s(%(args)sError **errp);
''',
ret_type=c_type(ret_type), name=c_fun(name), args=arglist).strip()
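# Illustrative output (sketch): a schema command 'query-foo' returning
# 'FooInfo' produces roughly:
#   FooInfo * qmp_query_foo(Error **errp);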
def gen_sync_call(name, args, ret_type, indent=0):
ret = ""
arglist=""
retval=""
if ret_type:
retval = "retval = "
for argname, argtype, optional, structured in parse_args(args):
if optional:
arglist += "has_%s, " % c_var(argname)
arglist += "%s, " % (c_var(argname))
push_indent(indent)
ret = mcgen('''
%(retval)sqmp_%(name)s(%(args)serrp);
''',
name=c_fun(name), args=arglist, retval=retval).rstrip()
if ret_type:
ret += "\n" + mcgen(''''
if (!error_is_set(errp)) {
%(marshal_output_call)s
}
''',
marshal_output_call=gen_marshal_output_call(name, ret_type)).rstrip()
pop_indent(indent)
return ret.rstrip()
def gen_marshal_output_call(name, ret_type):
if not ret_type:
return ""
return "qmp_marshal_output_%s(retval, ret, errp);" % c_fun(name)
def gen_visitor_output_containers_decl(ret_type):
ret = ""
push_indent()
if ret_type:
ret += mcgen('''
QmpOutputVisitor *mo;
QapiDeallocVisitor *md;
Visitor *v;
''')
pop_indent()
return ret
def gen_visitor_input_containers_decl(args):
ret = ""
push_indent()
if len(args) > 0:
ret += mcgen('''
QmpInputVisitor *mi;
QapiDeallocVisitor *md;
Visitor *v;
''')
pop_indent()
return ret.rstrip()
def gen_visitor_input_vars_decl(args):
ret = ""
push_indent()
for argname, argtype, optional, structured in parse_args(args):
if optional:
ret += mcgen('''
bool has_%(argname)s = false;
''',
argname=c_var(argname))
if c_type(argtype).endswith("*"):
ret += mcgen('''
%(argtype)s %(argname)s = NULL;
''',
argname=c_var(argname), argtype=c_type(argtype))
else:
ret += mcgen('''
%(argtype)s %(argname)s;
''',
argname=c_var(argname), argtype=c_type(argtype))
pop_indent()
return ret.rstrip()
def gen_visitor_input_block(args, obj, dealloc=False):
ret = ""
if len(args) == 0:
return ret
push_indent()
if dealloc:
ret += mcgen('''
md = qapi_dealloc_visitor_new();
v = qapi_dealloc_get_visitor(md);
''')
else:
ret += mcgen('''
mi = qmp_input_visitor_new_strict(%(obj)s);
v = qmp_input_get_visitor(mi);
''',
obj=obj)
for argname, argtype, optional, structured in parse_args(args):
if optional:
ret += mcgen('''
visit_start_optional(v, &has_%(c_name)s, "%(name)s", errp);
if (has_%(c_name)s) {
''',
c_name=c_var(argname), name=argname)
push_indent()
ret += mcgen('''
%(visitor)s(v, &%(c_name)s, "%(name)s", errp);
''',
c_name=c_var(argname), name=argname, argtype=argtype,
visitor=type_visitor(argtype))
if optional:
pop_indent()
ret += mcgen('''
}
visit_end_optional(v, errp);
''')
if dealloc:
ret += mcgen('''
qapi_dealloc_visitor_cleanup(md);
''')
else:
ret += mcgen('''
qmp_input_visitor_cleanup(mi);
''')
pop_indent()
return ret.rstrip()
def gen_marshal_output(name, args, ret_type, middle_mode):
if not ret_type:
return ""
ret = mcgen('''
static void qmp_marshal_output_%(c_name)s(%(c_ret_type)s ret_in, QObject **ret_out, Error **errp)
{
QapiDeallocVisitor *md = qapi_dealloc_visitor_new();
QmpOutputVisitor *mo = qmp_output_visitor_new();
Visitor *v;
v = qmp_output_get_visitor(mo);
%(visitor)s(v, &ret_in, "unused", errp);
if (!error_is_set(errp)) {
*ret_out = qmp_output_get_qobject(mo);
}
qmp_output_visitor_cleanup(mo);
v = qapi_dealloc_get_visitor(md);
%(visitor)s(v, &ret_in, "unused", errp);
qapi_dealloc_visitor_cleanup(md);
}
''',
c_ret_type=c_type(ret_type), c_name=c_fun(name),
visitor=type_visitor(ret_type))
return ret
def gen_marshal_input_decl(name, args, ret_type, middle_mode):
if middle_mode:
return 'int qmp_marshal_input_%s(Monitor *mon, const QDict *qdict, QObject **ret)' % c_fun(name)
else:
return 'static void qmp_marshal_input_%s(QDict *args, QObject **ret, Error **errp)' % c_fun(name)
def gen_marshal_input(name, args, ret_type, middle_mode):
hdr = gen_marshal_input_decl(name, args, ret_type, middle_mode)
ret = mcgen('''
%(header)s
{
''',
header=hdr)
if middle_mode:
ret += mcgen('''
Error *local_err = NULL;
Error **errp = &local_err;
QDict *args = (QDict *)qdict;
''')
if ret_type:
if c_type(ret_type).endswith("*"):
retval = " %s retval = NULL;" % c_type(ret_type)
else:
retval = " %s retval;" % c_type(ret_type)
ret += mcgen('''
%(retval)s
''',
retval=retval)
if len(args) > 0:
ret += mcgen('''
%(visitor_input_containers_decl)s
%(visitor_input_vars_decl)s
%(visitor_input_block)s
''',
visitor_input_containers_decl=gen_visitor_input_containers_decl(args),
visitor_input_vars_decl=gen_visitor_input_vars_decl(args),
visitor_input_block=gen_visitor_input_block(args, "QOBJECT(args)"))
else:
ret += mcgen('''
(void)args;
''')
ret += mcgen('''
if (error_is_set(errp)) {
goto out;
}
%(sync_call)s
''',
sync_call=gen_sync_call(name, args, ret_type, indent=4))
ret += mcgen('''
out:
''')
ret += mcgen('''
%(visitor_input_block_cleanup)s
''',
visitor_input_block_cleanup=gen_visitor_input_block(args, None,
dealloc=True))
if middle_mode:
ret += mcgen('''
if (local_err) {
qerror_report_err(local_err);
error_free(local_err);
return -1;
}
return 0;
''')
else:
ret += mcgen('''
return;
''')
ret += mcgen('''
}
''')
return ret
def option_value_matches(opt, val, cmd):
if opt in cmd and cmd[opt] == val:
return True
return False
def gen_registry(commands):
registry=""
push_indent()
for cmd in commands:
options = 'QCO_NO_OPTIONS'
if option_value_matches('success-response', 'no', cmd):
options = 'QCO_NO_SUCCESS_RESP'
registry += mcgen('''
qmp_register_command("%(name)s", qmp_marshal_input_%(c_name)s, %(opts)s);
''',
name=cmd['command'], c_name=c_fun(cmd['command']),
opts=options)
pop_indent()
ret = mcgen('''
static void qmp_init_marshal(void)
{
%(registry)s
}
qapi_init(qmp_init_marshal);
''',
registry=registry.rstrip())
return ret
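# Illustrative output (sketch): for a command named 'query-foo' the generated
# registry body contains a line like:
#   qmp_register_command("query-foo", qmp_marshal_input_query_foo, QCO_NO_OPTIONS);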
def gen_command_decl_prologue(header, guard, prefix=""):
ret = mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI function prototypes
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "%(prefix)sqapi-types.h"
#include "qapi/qmp/qdict.h"
#include "qapi/error.h"
''',
header=basename(header), guard=guardname(header), prefix=prefix)
return ret
def gen_command_def_prologue(prefix="", proxy=False):
ret = mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QMP->QAPI command dispatch
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "qemu-common.h"
#include "qemu/module.h"
#include "qapi/qmp/qerror.h"
#include "qapi/qmp/types.h"
#include "qapi/qmp/dispatch.h"
#include "qapi/visitor.h"
#include "qapi/qmp-output-visitor.h"
#include "qapi/qmp-input-visitor.h"
#include "qapi/dealloc-visitor.h"
#include "%(prefix)sqapi-types.h"
#include "%(prefix)sqapi-visit.h"
''',
prefix=prefix)
if not proxy:
ret += '#include "%sqmp-commands.h"' % prefix
return ret + "\n\n"
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "chp:o:m",
["source", "header", "prefix=",
"output-dir=", "type=", "middle"])
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
output_dir = ""
prefix = ""
dispatch_type = "sync"
c_file = 'qmp-marshal.c'
h_file = 'qmp-commands.h'
middle_mode = False
do_c = False
do_h = False
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
elif o in ("-t", "--type"):
dispatch_type = a
elif o in ("-m", "--middle"):
middle_mode = True
elif o in ("-c", "--source"):
do_c = True
elif o in ("-h", "--header"):
do_h = True
if not do_c and not do_h:
do_c = True
do_h = True
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
def maybe_open(really, name, opt):
if really:
return open(name, opt)
else:
import StringIO
return StringIO.StringIO()
try:
os.makedirs(output_dir)
except os.error, e:
if e.errno != errno.EEXIST:
raise
exprs = parse_schema(sys.stdin)
commands = filter(lambda expr: expr.has_key('command'), exprs)
commands = filter(lambda expr: not expr.has_key('gen'), commands)
if dispatch_type == "sync":
fdecl = maybe_open(do_h, h_file, 'w')
fdef = maybe_open(do_c, c_file, 'w')
ret = gen_command_decl_prologue(header=basename(h_file), guard=guardname(h_file), prefix=prefix)
fdecl.write(ret)
ret = gen_command_def_prologue(prefix=prefix)
fdef.write(ret)
for cmd in commands:
arglist = []
ret_type = None
if cmd.has_key('data'):
arglist = cmd['data']
if cmd.has_key('returns'):
ret_type = cmd['returns']
ret = generate_command_decl(cmd['command'], arglist, ret_type) + "\n"
fdecl.write(ret)
if ret_type:
ret = gen_marshal_output(cmd['command'], arglist, ret_type, middle_mode) + "\n"
fdef.write(ret)
if middle_mode:
fdecl.write('%s;\n' % gen_marshal_input_decl(cmd['command'], arglist, ret_type, middle_mode))
ret = gen_marshal_input(cmd['command'], arglist, ret_type, middle_mode) + "\n"
fdef.write(ret)
    fdecl.write("\n#endif\n")
if not middle_mode:
ret = gen_registry(commands)
fdef.write(ret)
fdef.flush()
fdef.close()
fdecl.flush()
fdecl.close()
|
pombredanne/grokmirror | refs/heads/master | grokmirror/fsck.py | 1 | #-*- coding: utf-8 -*-
# Copyright (C) 2013 by The Linux Foundation and contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import grokmirror
import logging
import time
import json
import subprocess
import random
import datetime
from fcntl import lockf, LOCK_EX, LOCK_UN, LOCK_NB
# default basic logger. We override it later.
logger = logging.getLogger(__name__)
def run_git_prune(fullpath, config, manifest):
if 'prune' not in config.keys() or config['prune'] != 'yes':
return
# Are any other repos using us in their objects/info/alternates?
gitdir = '/' + os.path.relpath(fullpath, config['toplevel']).lstrip('/')
repolist = grokmirror.find_all_alt_repos(gitdir, manifest)
if len(repolist):
logger.debug('Not pruning %s as other repos use it as alternates' % gitdir)
return
try:
grokmirror.lock_repo(fullpath, nonblocking=True)
except IOError:
logger.info('Could not obtain exclusive lock on %s' % fullpath)
logger.info('Will prune next time')
return
env = {'GIT_DIR': fullpath}
args = ['/usr/bin/git', 'prune']
logger.info('Pruning %s' % fullpath)
logger.debug('Running: GIT_DIR=%s %s' % (env['GIT_DIR'], ' '.join(args)))
(output, error) = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env).communicate()
error = error.strip()
if error:
# Put things we recognize as fairly benign into debug
debug = []
warn = []
for line in error.split('\n'):
ignored = False
for estring in config['ignore_errors']:
if line.find(estring) != -1:
ignored = True
debug.append(line)
break
if not ignored:
warn.append(line)
if debug:
logger.debug('Stderr: %s' % '\n'.join(debug))
if warn:
logger.critical('Pruning %s returned critical errors:' % fullpath)
for entry in warn:
logger.critical("\t%s" % entry)
grokmirror.unlock_repo(fullpath)
def run_git_repack(fullpath, config, full_repack=False):
if 'repack' not in config.keys() or config['repack'] != 'yes':
return
try:
grokmirror.lock_repo(fullpath, nonblocking=True)
except IOError:
logger.info('Could not obtain exclusive lock on %s' % fullpath)
logger.info('Will repack next time')
return
repack_flags = '-A -d -l -q'
if full_repack and 'full_repack_flags' in config.keys():
repack_flags = config['full_repack_flags']
logger.info('Time to do a full repack of %s' % fullpath)
elif 'repack_flags' in config.keys():
repack_flags = config['repack_flags']
flags = repack_flags.split()
env = {'GIT_DIR': fullpath}
args = ['/usr/bin/git', 'repack'] + flags
logger.info('Repacking %s' % fullpath)
logger.debug('Running: GIT_DIR=%s %s' % (env['GIT_DIR'], ' '.join(args)))
(output, error) = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env).communicate()
error = error.strip()
# With newer versions of git, repack may return warnings that are safe to ignore
# so use the same strategy to weed out things we aren't interested in seeing
if error:
# Put things we recognize as fairly benign into debug
debug = []
warn = []
for line in error.split('\n'):
ignored = False
for estring in config['ignore_errors']:
if line.find(estring) != -1:
ignored = True
debug.append(line)
break
if not ignored:
warn.append(line)
if debug:
logger.debug('Stderr: %s' % '\n'.join(debug))
if warn:
logger.critical('Repacking %s returned critical errors:' % fullpath)
for entry in warn:
logger.critical("\t%s" % entry)
# repacking refs requires a separate command, so run it now
args = ['/usr/bin/git', 'pack-refs', '--all']
logger.debug('Repacking refs in %s' % fullpath)
logger.debug('Running: GIT_DIR=%s %s' % (env['GIT_DIR'], ' '.join(args)))
(output, error) = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env).communicate()
# pack-refs shouldn't return anything, but use the same ignore_errors block
# to weed out any future potential benign warnings
if error:
# Put things we recognize as fairly benign into debug
debug = []
warn = []
for line in error.split('\n'):
ignored = False
for estring in config['ignore_errors']:
if line.find(estring) != -1:
ignored = True
debug.append(line)
break
if not ignored:
warn.append(line)
if debug:
logger.debug('Stderr: %s' % '\n'.join(debug))
if warn:
logger.critical('Repacking refs %s returned critical errors:' % fullpath)
for entry in warn:
logger.critical("\t%s" % entry)
grokmirror.unlock_repo(fullpath)
def run_git_fsck(fullpath, config):
    # Lock the git repository so no other grokmirror process attempts to
    # modify it while we're running git ops. The lock is non-blocking: if
    # another process holds it, skip this repo for now and check it on the
    # next scheduled run instead of stalling the whole fsck pass.
try:
grokmirror.lock_repo(fullpath, nonblocking=True)
except IOError:
logger.info('Could not obtain exclusive lock on %s' % fullpath)
logger.info('Will fsck next time')
return
env = {'GIT_DIR': fullpath}
args = ['/usr/bin/git', 'fsck', '--full']
logger.info('Checking %s' % fullpath)
logger.debug('Running: GIT_DIR=%s %s' % (env['GIT_DIR'], ' '.join(args)))
(output, error) = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env).communicate()
error = error.strip()
if error:
# Put things we recognize as fairly benign into debug
debug = []
warn = []
for line in error.split('\n'):
ignored = False
for estring in config['ignore_errors']:
if line.find(estring) != -1:
ignored = True
debug.append(line)
break
if not ignored:
warn.append(line)
if debug:
logger.debug('Stderr: %s' % '\n'.join(debug))
if warn:
logger.critical('%s has critical errors:' % fullpath)
for entry in warn:
logger.critical("\t%s" % entry)
grokmirror.unlock_repo(fullpath)
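# The stderr triage above is duplicated verbatim in run_git_prune,
# run_git_repack and run_git_fsck. A minimal sketch of a shared helper is
# below (illustrative only, not wired into the functions above); it assumes
# the same config['ignore_errors'] list of substrings used in this module:
def _triage_stderr(error, ignore_errors):
    # Split stderr lines into benign (debug) and critical (warn) buckets,
    # mirroring the inline loops above.
    debug = []
    warn = []
    for line in error.split('\n'):
        for estring in ignore_errors:
            if line.find(estring) != -1:
                debug.append(line)
                break
        else:
            warn.append(line)
    return debug, warn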
def fsck_mirror(name, config, verbose=False, force=False):
global logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
if 'log' in config.keys():
ch = logging.FileHandler(config['log'])
formatter = logging.Formatter(
"[%(process)d] %(asctime)s - %(levelname)s - %(message)s")
ch.setFormatter(formatter)
loglevel = logging.INFO
if 'loglevel' in config.keys():
if config['loglevel'] == 'debug':
loglevel = logging.DEBUG
ch.setLevel(loglevel)
logger.addHandler(ch)
ch = logging.StreamHandler()
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
if verbose:
ch.setLevel(logging.INFO)
else:
ch.setLevel(logging.CRITICAL)
logger.addHandler(ch)
# push it into grokmirror to override the default logger
grokmirror.logger = logger
logger.info('Running grok-fsck for [%s]' % name)
# Lock the tree to make sure we only run one instance
logger.debug('Attempting to obtain lock on %s' % config['lock'])
flockh = open(config['lock'], 'w')
try:
lockf(flockh, LOCK_EX | LOCK_NB)
except IOError:
logger.info('Could not obtain exclusive lock on %s' % config['lock'])
logger.info('Assuming another process is running.')
return 0
manifest = grokmirror.read_manifest(config['manifest'])
if os.path.exists(config['statusfile']):
logger.info('Reading status from %s' % config['statusfile'])
stfh = open(config['statusfile'], 'r')
try:
# Format of the status file:
# {
# '/full/path/to/repository': {
# 'lastcheck': 'YYYY-MM-DD' or 'never',
# 'nextcheck': 'YYYY-MM-DD',
# 'lastrepack': 'YYYY-MM-DD',
# 'fingerprint': 'sha-1',
# 's_elapsed': seconds,
# 'quick_repack_count': times,
# },
# ...
# }
status = json.load(stfh)
        except ValueError:
            # It's broken!
logger.critical('Failed to parse %s' % config['statusfile'])
lockf(flockh, LOCK_UN)
flockh.close()
return 1
else:
status = {}
frequency = int(config['frequency'])
today = datetime.datetime.today()
# Go through the manifest and compare with status
for gitdir in manifest.keys():
fullpath = os.path.join(config['toplevel'], gitdir.lstrip('/'))
if fullpath not in status.keys():
# Newly added repository
# Randomize next check between now and frequency
delay = random.randint(0, frequency)
nextdate = today + datetime.timedelta(days=delay)
nextcheck = nextdate.strftime('%F')
status[fullpath] = {
'lastcheck': 'never',
'nextcheck': nextcheck,
}
logger.info('Added new repository %s with next check on %s' % (
gitdir, nextcheck))
total_checked = 0
total_elapsed = 0
# Go through status and queue checks for all the dirs that are due today
# (unless --force, which is EVERYTHING)
todayiso = today.strftime('%F')
for fullpath in status.keys():
# Check to make sure it's still in the manifest
gitdir = fullpath.replace(config['toplevel'], '', 1)
gitdir = '/' + gitdir.lstrip('/')
if gitdir not in manifest.keys():
del status[fullpath]
logger.info('Removed %s which is no longer in manifest' % gitdir)
continue
# If nextcheck is before today, set it to today
# XXX: If a system comes up after being in downtime for a while, this
# may cause pain for them, so perhaps use randomization here?
nextcheck = datetime.datetime.strptime(status[fullpath]['nextcheck'],
'%Y-%m-%d')
if force or nextcheck <= today:
logger.debug('Preparing to check %s' % fullpath)
# Calculate elapsed seconds
startt = time.time()
run_git_fsck(fullpath, config)
total_checked += 1
# Did the fingerprint change since last time we repacked?
oldfpr = None
if 'fingerprint' in status[fullpath].keys():
oldfpr = status[fullpath]['fingerprint']
fpr = grokmirror.get_repo_fingerprint(config['toplevel'], gitdir, force=True)
if fpr != oldfpr or force:
full_repack = False
                if 'quick_repack_count' not in status[fullpath]:
status[fullpath]['quick_repack_count'] = 0
quick_repack_count = status[fullpath]['quick_repack_count']
if 'full_repack_every' in config.keys():
# but did you set 'full_repack_flags' as well?
if 'full_repack_flags' not in config.keys():
logger.critical('full_repack_every is set, but not full_repack_flags')
else:
full_repack_every = int(config['full_repack_every'])
# is it anything insane?
if full_repack_every < 2:
full_repack_every = 2
logger.warning('full_repack_every is too low, forced to 2')
# is it time to trigger full repack?
# We -1 because if we want a repack every 10th time, then we need to trigger
# when current repack count is 9.
if quick_repack_count >= full_repack_every-1:
logger.debug('Time to do full repack on %s' % fullpath)
full_repack = True
quick_repack_count = 0
status[fullpath]['lastfullrepack'] = todayiso
else:
logger.debug('Repack count for %s not yet reached full repack trigger' % fullpath)
quick_repack_count += 1
run_git_repack(fullpath, config, full_repack)
run_git_prune(fullpath, config, manifest)
status[fullpath]['lastrepack'] = todayiso
status[fullpath]['quick_repack_count'] = quick_repack_count
else:
logger.debug('No changes to %s since last run, not repacking' % gitdir)
endt = time.time()
total_elapsed += endt-startt
status[fullpath]['fingerprint'] = fpr
status[fullpath]['lastcheck'] = todayiso
status[fullpath]['s_elapsed'] = int(endt - startt)
if force:
# Use randomization for next check, again
delay = random.randint(1, frequency)
else:
delay = frequency
nextdate = today + datetime.timedelta(days=delay)
status[fullpath]['nextcheck'] = nextdate.strftime('%F')
# Write status file after each check, so if the process dies, we won't
# have to recheck all the repos we've already checked
logger.debug('Updating status file in %s' % config['statusfile'])
stfh = open(config['statusfile'], 'w')
json.dump(status, stfh, indent=2)
stfh.close()
if not total_checked:
logger.info('No new repos to check.')
else:
logger.info('Repos checked: %s' % total_checked)
logger.info('Total running time: %s s' % int(total_elapsed))
lockf(flockh, LOCK_UN)
flockh.close()
def parse_args():
from optparse import OptionParser
usage = '''usage: %prog -c fsck.conf
Run a git-fsck check on grokmirror-managed repositories.
'''
op = OptionParser(usage=usage, version=grokmirror.VERSION)
op.add_option('-v', '--verbose', dest='verbose', action='store_true',
default=False,
help='Be verbose and tell us what you are doing')
op.add_option('-f', '--force', dest='force',
action='store_true', default=False,
help='Force immediate run on all repositories.')
op.add_option('-c', '--config', dest='config',
help='Location of fsck.conf')
opts, args = op.parse_args()
if not opts.config:
op.error('You must provide the path to the config file')
return opts, args
def grok_fsck(config, verbose=False, force=False):
from ConfigParser import ConfigParser
ini = ConfigParser()
ini.read(config)
for section in ini.sections():
config = {}
for (option, value) in ini.items(section):
config[option] = value
if 'ignore_errors' not in config:
config['ignore_errors'] = [
'dangling commit',
'dangling blob',
'notice: HEAD points to an unborn branch',
'notice: No default references',
'contains zero-padded file modes',
]
else:
ignore_errors = []
for estring in config['ignore_errors'].split('\n'):
ignore_errors.append(estring.strip())
config['ignore_errors'] = ignore_errors
fsck_mirror(section, config, verbose, force)
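# A minimal fsck.conf sketch (all paths and values assumed, for illustration
# only); every INI section defines one mirror to check, and the option names
# match the config keys read above:
# [local]
# toplevel = /var/lib/git/mirror
# manifest = /var/lib/git/mirror/manifest.js.gz
# log = /var/log/grokmirror/fsck.log
# loglevel = info
# lock = /var/lock/grokmirror/fsck.lock
# statusfile = /var/lib/grokmirror/fsck-status.js
# frequency = 30
# repack = yes
# prune = yes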
def command():
opts, args = parse_args()
return grok_fsck(opts.config, opts.verbose, opts.force)
if __name__ == '__main__':
command()
|
quinot/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/execute_lambda.py | 16 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: execute_lambda
short_description: Execute an AWS Lambda function
description:
- This module executes AWS Lambda functions, allowing synchronous and asynchronous
invocation.
version_added: "2.2"
extends_documentation_fragment:
- aws
- ec2
author: "Ryan Scott Brown (@ryansb) <[email protected]>"
requirements:
- python >= 2.6
- boto3
notes:
- Async invocation will always return an empty C(output) key.
- Synchronous invocation may result in a function timeout, resulting in an
empty C(output) key.
options:
name:
description:
- The name of the function to be invoked. This can only be used for
invocations within the calling account. To invoke a function in another
account, use I(function_arn) to specify the full ARN.
required: false
default: None
function_arn:
description:
      - The full ARN of the function to be invoked. Use this to invoke a
        function in another account.
required: false
default: None
tail_log:
description:
- If C(tail_log=true), the result of the task will include the last 4 KB
of the CloudWatch log for the function execution. Log tailing only
works if you use synchronous invocation C(wait=true). This is usually
used for development or testing Lambdas.
required: false
default: false
wait:
description:
- Whether to wait for the function results or not. If I(wait) is false,
the task will not return any results. To wait for the Lambda function
to complete, set C(wait=true) and the result will be available in the
I(output) key.
required: false
default: true
dry_run:
description:
- Do not *actually* invoke the function. A C(DryRun) call will check that
the caller has permissions to call the function, especially for
checking cross-account permissions.
required: false
default: False
version_qualifier:
description:
- Which version/alias of the function to run. This defaults to the
C(LATEST) revision, but can be set to any existing version or alias.
        See https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html
for details.
required: false
default: LATEST
payload:
description:
- A dictionary in any form to be provided as input to the Lambda function.
required: false
default: {}
'''
EXAMPLES = '''
- execute_lambda:
name: test-function
# the payload is automatically serialized and sent to the function
payload:
foo: bar
value: 8
register: response
# Test that you have sufficient permissions to execute a Lambda function in
# another account
- execute_lambda:
function_arn: arn:aws:lambda:us-east-1:123456789012:function/some-function
dry_run: true
- execute_lambda:
name: test-function
payload:
foo: bar
value: 8
wait: true
tail_log: true
register: response
# the response will have a `logs` key that will contain a log (up to 4KB) of the function execution in Lambda.
- execute_lambda:
name: test-function
version_qualifier: PRODUCTION
'''
RETURN = '''
output:
description: Function output if wait=true and the function returns a value
returned: success
type: dict
sample: "{ 'output': 'something' }"
logs:
description: The last 4KB of the function logs. Only provided if I(tail_log) is true
type: string
returned: if I(tail_log) == true
status:
description: C(StatusCode) of API call exit (200 for synchronous invokes, 202 for async)
type: int
sample: 200
returned: always
'''
import base64
import json
import traceback
try:
import botocore
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import boto3_conn, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils._text import to_native
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
name=dict(),
function_arn=dict(),
wait=dict(default=True, type='bool'),
tail_log=dict(default=False, type='bool'),
dry_run=dict(default=False, type='bool'),
version_qualifier=dict(),
payload=dict(default={}, type='dict'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[
['name', 'function_arn'],
]
)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
name = module.params.get('name')
function_arn = module.params.get('function_arn')
await_return = module.params.get('wait')
dry_run = module.params.get('dry_run')
tail_log = module.params.get('tail_log')
version_qualifier = module.params.get('version_qualifier')
payload = module.params.get('payload')
if not (name or function_arn):
module.fail_json(msg="Must provide either a function_arn or a name to invoke.")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=HAS_BOTO3)
if not region:
module.fail_json(msg="The AWS region must be specified as an "
"environment variable or in the AWS credentials "
"profile.")
try:
client = boto3_conn(module, conn_type='client', resource='lambda',
region=region, endpoint=ec2_url, **aws_connect_kwargs)
except (botocore.exceptions.ClientError, botocore.exceptions.ValidationError) as e:
module.fail_json(msg="Failure connecting boto3 to AWS: %s" % to_native(e), exception=traceback.format_exc())
invoke_params = {}
if await_return:
# await response
invoke_params['InvocationType'] = 'RequestResponse'
else:
# fire and forget
invoke_params['InvocationType'] = 'Event'
if dry_run or module.check_mode:
# dry_run overrides invocation type
invoke_params['InvocationType'] = 'DryRun'
if tail_log and await_return:
invoke_params['LogType'] = 'Tail'
elif tail_log and not await_return:
module.fail_json(msg="The `tail_log` parameter is only available if "
"the invocation waits for the function to complete. "
"Set `wait` to true or turn off `tail_log`.")
else:
invoke_params['LogType'] = 'None'
if version_qualifier:
invoke_params['Qualifier'] = version_qualifier
if payload:
invoke_params['Payload'] = json.dumps(payload)
if function_arn:
invoke_params['FunctionName'] = function_arn
elif name:
invoke_params['FunctionName'] = name
try:
response = client.invoke(**invoke_params)
except botocore.exceptions.ClientError as ce:
if ce.response['Error']['Code'] == 'ResourceNotFoundException':
module.fail_json(msg="Could not find Lambda to execute. Make sure "
"the ARN is correct and your profile has "
"permissions to execute this function.",
exception=traceback.format_exc())
module.fail_json(msg="Client-side error when invoking Lambda, check inputs and specific error",
exception=traceback.format_exc())
except botocore.exceptions.ParamValidationError as ve:
module.fail_json(msg="Parameters to `invoke` failed to validate",
                         exception=traceback.format_exc())
except Exception as e:
module.fail_json(msg="Unexpected failure while invoking Lambda function",
exception=traceback.format_exc())
results = {
'logs': '',
'status': response['StatusCode'],
'output': '',
}
if response.get('LogResult'):
try:
# logs are base64 encoded in the API response
results['logs'] = base64.b64decode(response.get('LogResult', ''))
except Exception as e:
module.fail_json(msg="Failed while decoding logs", exception=traceback.format_exc())
if invoke_params['InvocationType'] == 'RequestResponse':
try:
results['output'] = json.loads(response['Payload'].read().decode('utf8'))
except Exception as e:
module.fail_json(msg="Failed while decoding function return value", exception=traceback.format_exc())
if isinstance(results.get('output'), dict) and any(
[results['output'].get('stackTrace'), results['output'].get('errorMessage')]):
# AWS sends back stack traces and error messages when a function failed
# in a RequestResponse (synchronous) context.
template = ("Function executed, but there was an error in the Lambda function. "
"Message: {errmsg}, Type: {type}, Stack Trace: {trace}")
error_data = {
# format the stacktrace sent back as an array into a multiline string
'trace': '\n'.join(
[' '.join([
str(x) for x in line # cast line numbers to strings
]) for line in results.get('output', {}).get('stackTrace', [])]
),
'errmsg': results['output'].get('errorMessage'),
'type': results['output'].get('errorType')
}
module.fail_json(msg=template.format(**error_data), result=results)
module.exit_json(changed=True, result=results)
if __name__ == '__main__':
main()
|
pmacosta/pexdoc | refs/heads/master | tests/pinspect.py | 1 | # pinspect.py
# Copyright (c) 2013-2019 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0103,C0111,C0413,E0611,F0401
# pylint: disable=R0201,R0205,R0903,R0913,R0914,R0915
# pylint: disable=W0104,W0212,W0232,W0611,W0612,W0613,W0621
# Standard library imports
from __future__ import print_function
from functools import partial
import copy
import os
import sys
import time
import types
# PyPI imports
import pmisc
from pmisc import AE, AI, CS, GET_EXMSG, RE
import pytest
if sys.hexversion >= 0x03000000:
from pexdoc.compat3 import _readlines
# Intra-package imports
import pexdoc.pinspect
###
# Helper functions
###
modfile = lambda x: sys.modules[x].__file__
###
# Tests for module functions
###
def test_private_props():
"""Test private_props function behavior."""
obj = pexdoc.pinspect.Callables()
assert sorted(list(pexdoc.pinspect.private_props(obj))) == [
"_callables_db",
"_class_names",
"_fnames",
"_module_names",
"_modules_dict",
"_reverse_callables_db",
]
if sys.hexversion >= 0x03000000:
def test_readlines(): # noqa: D202
"""Test _readlines function behavior."""
def mopen1(fname, mode):
raise RuntimeError("Mock mopen1 function")
def mopen2(fname, mode):
text = chr(40960) + "abcd" + chr(1972)
# Next line raises UnicodeDecodeError
b"\x80abc".decode("utf-8", "strict")
class MockOpenCls(object):
def __init__(self, fname, mode, encoding):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
if exc_type is not None:
return False
return True
def readlines(self):
return "MockOpenCls"
pkg_dir = os.path.abspath(os.path.dirname(__file__))
fname = os.path.join(pkg_dir, "test_misc.py")
# This should not trigger an exception (functionality checked
# by other unit tests)
_readlines(fname)
# Trigger unrelated exception exception
obj = _readlines
with pytest.raises(RuntimeError) as excinfo:
_readlines(fname, mopen1)
assert GET_EXMSG(excinfo) == "Mock mopen1 function"
# Trigger UnicodeDecodeError exception
assert _readlines(fname, mopen2, MockOpenCls) == "MockOpenCls"
def test_object_is_module():
"""Test object_is_module() function."""
assert not pexdoc.pinspect.is_object_module(5)
assert pexdoc.pinspect.is_object_module(sys.modules["pexdoc.pinspect"])
def test_get_module_name():
"""Test get_module_name() function."""
obj = pexdoc.pinspect.get_module_name
AI(obj, "module_obj", module_obj=5)
mock_module_obj = types.ModuleType("mock_module_obj", "Mock module")
exmsg = "Module object `mock_module_obj` could not be found in loaded modules"
AE(obj, RE, exmsg, module_obj=mock_module_obj)
ref = "pexdoc.pinspect"
assert pexdoc.pinspect.get_module_name(sys.modules[ref]) == ref
assert pexdoc.pinspect.get_module_name(sys.modules["pexdoc"]) == "pexdoc"
def test_get_module_name_from_fname():
"""Test _get_module_name_from_fname() function."""
obj = pexdoc.pinspect._get_module_name_from_fname
AE(obj, RE, "Module could not be found", fname="_not_a_module")
assert obj(modfile("pexdoc.pinspect")) == "pexdoc.pinspect"
def test_is_special_method():
"""Test is_special_method() function."""
assert not pexdoc.pinspect.is_special_method("func_name")
assert not pexdoc.pinspect.is_special_method("_func_name_")
assert pexdoc.pinspect.is_special_method("__func_name__")
###
# Test for classes
###
class TestCallables(object):
"""Test for Callables."""
def test_check_intersection(self):
"""Test _check_intersection method behavior."""
obj1 = pexdoc.pinspect.Callables()
obj1._callables_db = {"call1": 1, "call2": 2}
obj2 = pexdoc.pinspect.Callables()
obj2._callables_db = {"call1": 1, "call2": "a"}
exmsg = "Conflicting information between objects"
obj = obj1._check_intersection
AE(obj, RE, exmsg, other=obj2)
obj1._callables_db = {"call1": 1, "call2": ["a", "c"]}
obj2._callables_db = {"call1": 1, "call2": ["a", "b"]}
AE(obj, RE, exmsg, other=obj2)
obj1._callables_db = {"call1": 1, "call2": {"a": "b"}}
obj2._callables_db = {"call1": 1, "call2": {"a": "c"}}
AE(obj, RE, exmsg, other=obj2)
obj1._callables_db = {"call1": 1, "call2": "a"}
obj2._callables_db = {"call1": 1, "call2": "c"}
AE(obj, RE, exmsg, other=obj2)
obj1._callables_db = {"call1": 1, "call2": "a"}
obj2._callables_db = {"call1": 1, "call2": "a"}
assert obj1._check_intersection(obj2) is None
def test_init_exceptions(self):
"""Test constructor exceptions."""
obj = pexdoc.pinspect.Callables
for item in [5, [5]]:
AI(obj, "fnames", fnames=item)
exmsg = "File _not_a_file_ could not be found"
AE(obj, OSError, exmsg, fnames=["_not_a_file_"])
def test_add(self):
"""Test __add__ __radd__ method behavior."""
obj1 = pexdoc.pinspect.Callables()
obj1._callables_db = {"call1": {"a": 5, "b": 6}, "call2": {"a": 7, "b": 8}}
obj1._reverse_callables_db = {"rc1": "5", "rc2": "7"}
obj1._modules_dict = {"key1": {"entry": "alpha"}, "key2": {"entry": "beta"}}
obj1._fnames = {"hello": 0}
obj1._module_names = ["this", "is"]
obj1._class_names = ["once", "upon"]
#
obj2 = pexdoc.pinspect.Callables()
obj2._callables_db = {
"call3": {"a": 10, "b": 100},
"call4": {"a": 200, "b": 300},
}
obj2._reverse_callables_db = {"rc3": "0", "rc4": "1"}
obj2._modules_dict = {"key3": {"entry": "pi"}, "key4": {"entry": "gamma"}}
obj2._fnames = {"world": 1}
obj2._module_names = ["a", "test"]
obj2._class_names = ["a", "time"]
#
obj1._callables_db = {"call3": {"a": 5, "b": 6}, "call2": {"a": 7, "b": 8}}
with pytest.raises(RuntimeError) as excinfo:
obj1 + obj2
assert GET_EXMSG(excinfo) == "Conflicting information between objects"
obj1._callables_db = {"call1": {"a": 5, "b": 6}, "call2": {"a": 7, "b": 8}}
#
obj2._reverse_callables_db = {"rc3": "5", "rc2": "-1"}
with pytest.raises(RuntimeError) as excinfo:
obj1 + obj2
assert GET_EXMSG(excinfo) == "Conflicting information between objects"
obj2._reverse_callables_db = {"rc3": "0", "rc4": "-1"}
#
obj2._modules_dict = {"key1": {"entry": "pi"}, "key4": {"entry": "gamma"}}
with pytest.raises(RuntimeError) as excinfo:
obj1 + obj2
assert GET_EXMSG(excinfo) == "Conflicting information between objects"
obj2._modules_dict = {"key3": {"entry": "pi"}, "key4": {"entry": "gamma"}}
# Test when intersection is the same
obj2._modules_dict = {"key1": {"entry": "alpha"}, "key4": {"entry": "gamma"}}
obj1 + obj2
obj2._modules_dict = {"key3": {"entry": "pi"}, "key4": {"entry": "gamma"}}
#
sobj = obj1 + obj2
scomp = lambda x, y: sorted(x) == sorted(y)
ref = {
"call1": {"a": 5, "b": 6},
"call2": {"a": 7, "b": 8},
"call3": {"a": 10, "b": 100},
"call4": {"a": 200, "b": 300},
}
assert scomp(sobj._callables_db, ref)
ref = {"rc1": "5", "rc2": "7", "rc3": "0", "rc4": "-1"}
assert scomp(sobj._reverse_callables_db, ref)
ref = {
"key1": {"entry": "alpha"},
"key2": {"entry": "beta"},
"key3": {"entry": "pi"},
"key4": {"entry": "gamma"},
}
assert scomp(sobj._modules_dict, ref)
assert scomp(sobj._fnames, {"hello": 0, "world": 1})
assert scomp(sobj._module_names, ["this", "is", "a", "test"])
assert scomp(sobj._class_names, ["once", "upon", "a", "time"])
#
obj1 += obj2
ref = {
"call1": {"a": 5, "b": 6},
"call2": {"a": 7, "b": 8},
"call3": {"a": 10, "b": 100},
"call4": {"a": 200, "b": 300},
}
assert scomp(obj1._callables_db, ref)
ref = {"rc1": "5", "rc2": "7", "rc3": "0", "rc4": "-1"}
assert scomp(obj1._reverse_callables_db, ref)
ref = {
"key1": {"entry": "alpha"},
"key2": {"entry": "beta"},
"key3": {"entry": "pi"},
"key4": {"entry": "gamma"},
}
assert scomp(obj1._modules_dict, ref)
assert scomp(obj1._fnames, {"hello": 0, "world": 1})
assert scomp(obj1._module_names, ["this", "is", "a", "test"])
assert scomp(obj1._class_names, ["once", "upon", "a", "time"])
def test_copy(self):
"""Test __copy__ method behavior."""
sobj = pexdoc.pinspect.Callables()
import tests.support.pinspect_support_module_1
sobj.trace([modfile("tests.support.pinspect_support_module_1")])
dobj = copy.copy(sobj)
assert sobj._module_names == dobj._module_names
assert id(sobj._module_names) != id(dobj._module_names)
assert sobj._class_names == dobj._class_names
assert id(sobj._class_names) != id(dobj._class_names)
assert sobj._callables_db == dobj._callables_db
assert id(sobj._callables_db) != id(dobj._callables_db)
assert sobj._reverse_callables_db == dobj._reverse_callables_db
assert id(sobj._reverse_callables_db) != id(dobj._reverse_callables_db)
def test_eq(self):
"""Test __eq__ method behavior."""
obj1 = pexdoc.pinspect.Callables()
obj2 = pexdoc.pinspect.Callables()
obj3 = pexdoc.pinspect.Callables()
import tests.support.pinspect_support_module_1
import tests.support.pinspect_support_module_2
mname = "tests.support.pinspect_support_module_1"
obj1.trace([modfile(mname)])
obj2.trace([modfile(mname)])
obj3.trace([modfile("pmisc")])
assert (obj1 == obj2) and (obj1 != obj3)
assert obj1 != 5
def test_repr(self):
"""Test __repr__ method behavior."""
get_name = lambda x: modfile(x).replace(".pyc", ".py")
import tests.support.exdoc_support_module_1
file1 = get_name("tests.support.exdoc_support_module_1")
file2 = get_name("tests.support.exdoc_support_module_2")
xobj = pexdoc.pinspect.Callables([file2])
xobj.trace([file1])
ref = "pexdoc.pinspect.Callables([{0}, {1}])".format(repr(file1), repr(file2))
assert repr(xobj) == ref
def test_str_empty(self):
"""Test __str__ magic method when object is empty."""
obj = pexdoc.pinspect.Callables()
assert str(obj) == ""
def test_refresh(self):
"""Test refresh method behavior."""
ref = modfile("pexdoc.pinspect")
src = os.path.join(os.path.dirname(ref), "pit.py")
with open(src, "w") as fobj:
fobj.write(
"class MyClass(object):\n" " pass\n" "def func1():\n" " pass\n"
)
import pexdoc.pit
obj = pexdoc.pinspect.Callables([ref, src])
tmod = obj._fnames[src]
obj.trace([src])
assert obj._fnames[src] == tmod
cname1 = "pexdoc.pinspect.Callables"
cname2 = "pexdoc.pinspect._AstTreeScanner"
rtext = (
"Modules:\n",
" pexdoc.pinspect\n",
" pexdoc.pit\n",
"Classes:\n",
" pexdoc.pinspect.Callables\n",
" pexdoc.pinspect._AstTreeScanner\n",
" pexdoc.pit.MyClass\n",
"pexdoc.pinspect._get_module_name_from_fname: func (40-53)\n",
"pexdoc.pinspect._validate_fname: func (54-65)\n",
"pexdoc.pinspect.get_function_args: func (66-128)\n",
"pexdoc.pinspect.get_module_name: func (129-158)\n",
"pexdoc.pinspect.is_object_module: func (159-170)\n",
"pexdoc.pinspect.is_special_method: func (171-182)\n",
"pexdoc.pinspect.private_props: func (183-206)\n",
cname1 + ": class (207-772)\n",
cname1 + ".__init__: meth (234-243)\n",
cname1 + ".__add__: meth (244-283)\n",
cname1 + ".__bool__: meth (284-306)\n",
cname1 + ".__copy__: meth (307-325)\n",
cname1 + ".__eq__: meth (326-355)\n",
cname1 + ".__iadd__: meth (356-391)\n",
cname1 + ".__nonzero__: meth (392-414)\n",
cname1 + ".__repr__: meth (415-433)\n",
cname1 + ".__str__: meth (434-483)\n",
cname1 + "._check_intersection: meth (484-507)\n",
cname1 + "._get_callables_db: meth (508-511)\n",
cname1 + ".get_callable_from_line: meth (512-526)\n",
cname1 + "._get_reverse_callables_db: meth (527-530)\n",
cname1 + ".load: meth (531-577)\n",
cname1 + ".refresh: meth (578-581)\n",
cname1 + ".save: meth (582-610)\n",
cname1 + ".trace: meth (611-697)\n",
cname1 + ".callables_db: prop (698-730)\n",
cname1 + ".reverse_callables_db: prop (731-772)\n",
cname2 + ": class (773-1119)\n",
cname2 + ".__init__: meth (777-790)\n",
cname2 + "._close_callable: meth (791-904)\n",
cname2 + "._get_indent: meth (905-912)\n",
cname2 + "._in_class: meth (913-920)\n",
cname2 + "._pop_indent_stack: meth (921-966)\n",
cname2 + ".generic_visit: meth (967-980)\n",
cname2 + ".visit_arguments: meth (981-988)\n",
cname2 + ".visit_Assign: meth (989-1030)\n",
cname2 + ".visit_ClassDef: meth (1031-1065)\n",
cname2 + ".visit_FunctionDef: meth (1066-1119)\n",
"pexdoc.pit.MyClass: class (1-2)\n",
"pexdoc.pit.func1: func (3-4)",
)
CS(str(obj), "".join(rtext))
ftime = int(os.path.getmtime(src))
while int(time.time()) <= ftime:
time.sleep(0.1)
os.remove(src)
content = "def my_func():\n pass"
with open(src, "w") as fobj:
fobj.write(content)
obj.refresh()
assert obj._fnames[src] != tmod
rtext = (
"Modules:\n",
" pexdoc.pinspect\n",
" pexdoc.pit\n",
"Classes:\n",
" pexdoc.pinspect.Callables\n",
" pexdoc.pinspect._AstTreeScanner\n",
"pexdoc.pinspect._get_module_name_from_fname: func (40-53)\n",
"pexdoc.pinspect._validate_fname: func (54-65)\n",
"pexdoc.pinspect.get_function_args: func (66-128)\n",
"pexdoc.pinspect.get_module_name: func (129-158)\n",
"pexdoc.pinspect.is_object_module: func (159-170)\n",
"pexdoc.pinspect.is_special_method: func (171-182)\n",
"pexdoc.pinspect.private_props: func (183-206)\n",
cname1 + ": class (207-772)\n",
cname1 + ".__init__: meth (234-243)\n",
cname1 + ".__add__: meth (244-283)\n",
cname1 + ".__bool__: meth (284-306)\n",
cname1 + ".__copy__: meth (307-325)\n",
cname1 + ".__eq__: meth (326-355)\n",
cname1 + ".__iadd__: meth (356-391)\n",
cname1 + ".__nonzero__: meth (392-414)\n",
cname1 + ".__repr__: meth (415-433)\n",
cname1 + ".__str__: meth (434-483)\n",
cname1 + "._check_intersection: meth (484-507)\n",
cname1 + "._get_callables_db: meth (508-511)\n",
cname1 + ".get_callable_from_line: meth (512-526)\n",
cname1 + "._get_reverse_callables_db: meth (527-530)\n",
cname1 + ".load: meth (531-577)\n",
cname1 + ".refresh: meth (578-581)\n",
cname1 + ".save: meth (582-610)\n",
cname1 + ".trace: meth (611-697)\n",
cname1 + ".callables_db: prop (698-730)\n",
cname1 + ".reverse_callables_db: prop (731-772)\n",
cname2 + ": class (773-1119)\n",
cname2 + ".__init__: meth (777-790)\n",
cname2 + "._close_callable: meth (791-904)\n",
cname2 + "._get_indent: meth (905-912)\n",
cname2 + "._in_class: meth (913-920)\n",
cname2 + "._pop_indent_stack: meth (921-966)\n",
cname2 + ".generic_visit: meth (967-980)\n",
cname2 + ".visit_arguments: meth (981-988)\n",
cname2 + ".visit_Assign: meth (989-1030)\n",
cname2 + ".visit_ClassDef: meth (1031-1065)\n",
cname2 + ".visit_FunctionDef: meth (1066-1119)\n",
"pexdoc.pit.my_func: func (1-2)",
)
CS(str(obj), "".join(rtext))
## Test malformed JSON file
obj = pexdoc.pinspect.Callables()
json_src = os.path.join(os.path.dirname(ref), "pit.json")
json_txt = (
"{{\n"
' "_callables_db": {{\n'
' "pexdoc.pit.my_func": {{\n'
' "code_id": [\n'
' "{pyfile}",\n'
" 1\n"
" ],\n"
' "last_lineno": 2,\n'
' "name": "pexdoc.pit.my_func",\n'
' "type": "func"\n'
" }}\n"
" }},\n"
' "_class_names": [],\n'
' "_fnames": {{\n'
' "{pyfile}": {{\n'
' "classes": [],\n'
' "date": 1,\n'
' "name": "pexdoc.pit"\n'
" }}\n"
" }},\n"
' "_module_names": [\n'
' "pexdoc.pit"\n'
" ],\n"
' "_modules_dict": {{\n'
' "pexdoc.pit": [\n'
" {{\n"
' "code_id": [\n'
' "{pyfile}",\n'
" 1\n"
" ],\n"
' "last_lineno": 2,\n'
' "name": "pexdoc.pit.my_func",\n'
' "type": "func"\n'
" }}\n"
" ]\n"
" }},\n"
' "_reverse_callables_db": {{\n'
' "(\'{pyfile}\', 1)": "pexdoc.pit.my_func",\n'
' "(\'{pyfile}\', 10)": "pexdoc.pit.my_func"\n'
" }}\n"
"}}\n"
)
with open(json_src, "w") as fobj:
fobj.write(json_txt.format(pyfile=src.replace("\\", "/")))
obj.load(json_src)
obj.refresh()
os.remove(json_src)
os.remove(src)
def test_load_save(self):
"""Test load and save methods behavior."""
import tests.support.csv_file
import tests.support.exdoc_support_module_1
# Empty object
obj1 = pexdoc.pinspect.Callables()
with pmisc.TmpFile() as fname:
obj1.save(fname)
obj2 = pexdoc.pinspect.Callables()
obj2.load(fname)
assert obj1 == obj2
# 1 module trace
mname = "tests.support.csv_file"
cname = "{0}.CsvFile".format(mname)
obj1 = pexdoc.pinspect.Callables([modfile(mname)])
with pmisc.TmpFile() as fname:
obj1.save(fname)
obj2 = pexdoc.pinspect.Callables()
assert not bool(obj2)
obj2.load(fname)
assert obj1 == obj2
# Test merging of traced and file-based module information
mname1 = "tests.support.csv_file"
obj1 = pexdoc.pinspect.Callables([modfile(mname1)])
mname2 = "tests.support.exdoc_support_module_1"
obj2 = pexdoc.pinspect.Callables([modfile(mname2)])
with pmisc.TmpFile() as fname1:
with pmisc.TmpFile() as fname2:
obj1.save(fname1)
obj2.save(fname2)
obj3 = pexdoc.pinspect.Callables([modfile(mname1), modfile(mname2)])
obj4 = pexdoc.pinspect.Callables()
obj4.load(fname2)
obj4.load(fname1)
assert obj3 == obj4
def test_load_exceptions(self):
"""Test load method exceptions."""
obj = pexdoc.pinspect.Callables()
for item in [True, 5]:
AI(obj.load, "callables_fname", callables_fname=item)
exmsg = "File _not_a_file_ could not be found"
AE(obj.load, OSError, exmsg, callables_fname="_not_a_file_")
def test_save_exceptions(self):
"""Test save method exceptions."""
obj = pexdoc.pinspect.Callables()
for item in [True, 5]:
AI(obj.save, "callables_fname", callables_fname=item)
def test_trace(self):
"""Test trace method behavior."""
import tests.support.csv_file
mname = "tests.support.csv_file"
cname = "{0}.CsvFile".format(mname)
xobj = pexdoc.pinspect.Callables([modfile(mname)])
ref = []
ref.append("Modules:")
ref.append(" {0}".format(mname))
ref.append("Classes:")
ref.append(" {0}".format(cname))
ref.append("{0}._homogenize_data_filter: func (52-77)".format(mname))
ref.append("{0}._isnumber: func (78-86)".format(mname))
ref.append("{0}._tofloat: func (87-99)".format(mname))
ref.append("{0}._write_int: func (100-123)".format(mname))
ref.append("{0}: class (124-960)".format(cname))
ref.append("{0}.__init__: meth (176-242)".format(cname))
ref.append("{0}.__eq__: meth (243-279)".format(cname))
ref.append("{0}.__repr__: meth (280-316)".format(cname))
ref.append("{0}.__str__: meth (317-362)".format(cname))
ref.append("{0}._format_rfilter: meth (363-383)".format(cname))
ref.append("{0}._gen_col_index: meth (384-394)".format(cname))
ref.append("{0}._get_cfilter: meth (395-397)".format(cname))
ref.append("{0}._get_dfilter: meth (398-400)".format(cname))
ref.append("{0}._get_rfilter: meth (401-403)".format(cname))
ref.append("{0}._reset_dfilter_int: meth (404-409)".format(cname))
ref.append("{0}._in_header: meth (410-437)".format(cname))
ref.append("{0}._set_cfilter: meth (438-442)".format(cname))
ref.append("{0}._set_dfilter: meth (443-448)".format(cname))
ref.append("{0}._set_rfilter: meth (449-453)".format(cname))
ref.append("{0}._add_dfilter_int: meth (454-493)".format(cname))
ref.append("{0}._apply_filter: meth (494-522)".format(cname))
ref.append("{0}._set_has_header: meth (523-526)".format(cname))
ref.append("{0}._validate_frow: meth (527-532)".format(cname))
ref.append("{0}._validate_rfilter: meth (533-556)".format(cname))
ref.append("{0}.add_dfilter: meth (557-581)".format(cname))
ref.append("{0}.cols: meth (582-601)".format(cname))
ref.append("{0}.data: meth (602-632)".format(cname))
ref.append("{0}.dsort: meth (633-682)".format(cname))
ref.append("{0}.header: meth (683-715)".format(cname))
ref.append("{0}.replace: meth (716-781)".format(cname))
ref.append("{0}.reset_dfilter: meth (782-799)".format(cname))
ref.append("{0}.rows: meth (800-819)".format(cname))
ref.append("{0}.write: meth (820-889)".format(cname))
ref.append("{0}.cfilter: prop (890-912)".format(cname))
ref.append("{0}.dfilter: prop (913-936)".format(cname))
ref.append("{0}.rfilter: prop (937-960)".format(cname))
ref_txt = "\n".join(ref)
actual_txt = str(xobj)
CS(actual_txt, ref_txt)
#
import tests.support.exdoc_support_module_1
mname = "tests.support.exdoc_support_module_1"
xobj = pexdoc.pinspect.Callables([modfile(mname)])
ref = []
cname = "{0}.ExceptionAutoDocClass".format(mname)
ref.append("Modules:")
ref.append(" {0}".format(mname))
ref.append("Classes:")
ref.append(" {0}".format(cname))
ref.append(" {0}.MyClass".format(mname))
ref.append("{0}._validate_arguments: func (18-31)".format(mname))
ref.append("{0}._write: func (32-36)".format(mname))
ref.append("{0}.write: func (37-48)".format(mname))
ref.append("{0}.read: func (49-58)".format(mname))
ref.append("{0}.probe: func (59-68)".format(mname))
ref.append("{0}.dummy_decorator1: func (69-73)".format(mname))
ref.append("{0}.dummy_decorator2: func (74-88)".format(mname))
ref.append("{0}.dummy_decorator2.wrapper: func (82-85)".format(mname))
ref.append("{0}.mlmdfunc: func (89-97)".format(mname))
ref.append("{0}: class (98-220)".format(cname))
ref.append("{0}.__init__: meth (102-112)".format(cname))
ref.append("{0}._del_value3: meth (113-118)".format(cname))
ref.append("{0}._get_value3: meth (119-125)".format(cname))
ref.append("{0}._set_value1: meth (126-134)".format(cname))
ref.append("{0}._set_value2: meth (135-148)".format(cname))
ref.append("{0}._set_value3: meth (149-157)".format(cname))
ref.append("{0}.add: meth (158-165)".format(cname))
ref.append("{0}.subtract: meth (166-173)".format(cname))
ref.append("{0}.multiply: meth (174-181)".format(cname))
ref.append("{0}.divide: meth (182-186)".format(cname))
ref.append("{0}.temp(getter): meth (187-191)".format(cname))
ref.append("{0}.temp(setter): meth (192-197)".format(cname))
ref.append("{0}.temp(deleter): meth (198-203)".format(cname))
ref.append("{0}.value1: prop (204-211)".format(cname))
ref.append("{0}.value2: prop (212-214)".format(cname))
ref.append("{0}.value3: prop (215-216)".format(cname))
ref.append("{0}.value4: prop (217-220)".format(cname))
ref.append("{0}.my_func: func (221-224)".format(mname))
ref.append("{0}.MyClass: class (225-228)".format(mname))
ref.append("{0}.MyClass.value: prop (228)".format(mname))
ref_txt = "\n".join(ref)
actual_txt = str(xobj)
CS(actual_txt, ref_txt)
#
import tests.exdoc
mname = "tests.exdoc"
froot = "{0}.exdocobj".format(mname)
xobj = pexdoc.pinspect.Callables([modfile(mname)])
cname1 = "{0}.TestExDocCxt".format(mname)
cname2 = "{0}.TestExDoc".format(mname)
mename1 = "{0}.test_multiple".format(cname1)
mename2 = "{0}.test_build_ex_tree".format(cname2)
meroot = "{0}.test_get_sphinx".format(cname2)
ref = []
ref.append("Modules:")
ref.append(" {0}".format(mname))
ref.append("Classes:")
ref.append(" {0}.MockFCode".format(mname))
ref.append(" {0}.MockGetFrame".format(mname))
ref.append(" {0}.TestExDoc".format(mname))
ref.append(" {0}.TestExDocCxt".format(mname))
ref.append("{0}: func (54-91)".format(froot))
ref.append("{0}.multi_level_write: func (60-68)".format(froot))
ref.append("{0}_raised: func (92-105)".format(froot))
ref.append("{0}_single: func (106-115)".format(froot))
ref.append("{0}.simple_exobj: func (116-131)".format(mname))
ref.append("{0}.simple_exobj.func1: func (122-124)".format(mname))
ref.append("{0}.mock_getframe: func (132-135)".format(mname))
ref.append("{0}.trace_error_class: func (136-147)".format(mname))
ref.append("{0}.MockFCode: class (148-153)".format(mname))
ref.append("{0}.MockFCode.__init__: meth (149-153)".format(mname))
ref.append("{0}.MockGetFrame: class (154-161)".format(mname))
ref.append("{0}.MockGetFrame.__init__: meth (155-161)".format(mname))
ref.append("{0}: class (162-277)".format(cname1))
ref.append("{0}.test_init: meth (165-220)".format(cname1))
ref.append("{0}.test_init.check_ctx1: func (168-174)".format(cname1))
ref.append("{0}.test_init.check_ctx2: func (175-182)".format(cname1))
ref.append("{0}.test_init.func0: func (183-190)".format(cname1))
ref.append("{0}.test_multiple: meth (221-259)".format(cname1))
ref.append("{0}.func1: func (224-231)".format(mename1))
ref.append("{0}.test_trace: func (232-247)".format(mename1))
ref.append("{0}.test_save_callables: meth (260-277)".format(cname1))
ref.append("{0}: class (278-747)".format(cname2))
ref.append("{0}.test_init: meth (281-297)".format(cname2))
ref.append("{0}.test_copy: meth (298-313)".format(cname2))
ref.append("{0}.test_build_ex_tree: meth (314-425)".format(cname2))
ref.append("{0}.func1: func (322-326)".format(mename2))
ref.append("{0}.mock_add_nodes1: func (329-331)".format(mename2))
ref.append("{0}.mock_add_nodes2: func (332-334)".format(mename2))
ref.append("{0}.mock_add_nodes3: func (335-337)".format(mename2))
ref.append("{0}.test_depth: meth (426-433)".format(cname2))
ref.append("{0}.test_exclude: meth (434-441)".format(cname2))
ref.append("{0}_autodoc: meth (442-471)".format(meroot))
ref.append("{0}_doc: meth (472-747)".format(meroot))
ref_txt = "\n".join(ref)
actual_txt = str(xobj)
CS(actual_txt, ref_txt)
#
import tests.support.pinspect_support_module_4
mname = "tests.support.pinspect_support_module_4"
xobj = pexdoc.pinspect.Callables([modfile(mname)])
ref = []
fname = "{0}.another_property_action_enclosing_function".format(mname)
ref.append("Modules:")
ref.append(" {0}".format(mname))
ref.append("{0}: func (17-24)".format(fname))
ref.append("{0}.fget: func (20-23)".format(fname))
ref_txt = "\n".join(ref)
actual_txt = str(xobj)
CS(actual_txt, ref_txt)
# Test re-tries, should produce no action and raise no exception
xobj = pexdoc.pinspect.Callables([modfile(mname)])
import tests.support.pinspect_support_module_10
mname = "tests.support.pinspect_support_module_10"
xobj = pexdoc.pinspect.Callables([modfile(mname)])
ref = []
cname = "{0}.AClass".format(mname)
ref.append("Modules:")
ref.append(" {0}".format(mname))
ref.append("Classes:")
ref.append(" {0}".format(cname))
ref.append(" {0}.method1.SubClass".format(cname))
ref.append("{0}: class (7-32)".format(cname))
ref.append("{0}.method1: meth (11-29)".format(cname))
ref.append("{0}.method1.func1: func (15-19)".format(cname))
ref.append("{0}.method1.SubClass: class (22-27)".format(cname))
ref.append("{0}.method1.SubClass.__init__: meth (25-27)".format(cname))
ref.append("{0}.method2: meth (30-32)".format(cname))
ref_txt = "\n".join(ref)
actual_txt = str(xobj)
CS(actual_txt, ref_txt)
def test_callables_db(self):
"""Test callables_db property."""
import tests.support.pinspect_support_module_4
mname = "tests.support.pinspect_support_module_4"
xobj = pexdoc.pinspect.Callables([modfile(mname)])
pkg_dir = os.path.dirname(__file__)
ref = {
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function": {
"code_id": (
os.path.join(pkg_dir, "support", "pinspect_support_module_4.py"),
16,
),
"last_lineno": 21,
"name": "pinspect_support_module_4."
"another_property_action_enclosing_function",
"type": "func",
},
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function.fget": {
"code_id": (
os.path.join(pkg_dir, "support", "pinspect_support_module_4.py"),
18,
),
"last_lineno": 20,
"name": "pinspect_support_module_4."
"another_property_action_enclosing_function.fget",
"type": "func",
},
}
assert sorted(xobj.callables_db) == sorted(ref)
ref = {
(os.path.join(pkg_dir, "support", "pinspect_support_module_4.py"), 17): (
"pinspect_support_module_4."
"another_property_action_enclosing_function"
),
(os.path.join(pkg_dir, "support", "pinspect_support_module_4.py"), 20): (
"pinspect_support_module_4."
"another_property_action_enclosing_function.fget"
),
}
assert sorted(xobj.reverse_callables_db) == sorted(ref)
def test_get_callable_from_line(self):
"""Test get_callable_from_line() function."""
xobj = pexdoc.pinspect.Callables()
import tests.support.pinspect_support_module_4
fname = modfile("tests.support.pinspect_support_module_4")
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function"
)
assert xobj.get_callable_from_line(fname, 17) == ref
xobj = pexdoc.pinspect.Callables([fname])
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function"
)
assert xobj.get_callable_from_line(fname, 17) == ref
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function"
)
assert xobj.get_callable_from_line(fname, 18) == ref
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function"
)
assert xobj.get_callable_from_line(fname, 24) == ref
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function.fget"
)
assert xobj.get_callable_from_line(fname, 20) == ref
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function.fget"
)
assert xobj.get_callable_from_line(fname, 21) == ref
ref = (
"tests.support.pinspect_support_module_4."
"another_property_action_enclosing_function.fget"
)
assert xobj.get_callable_from_line(fname, 22) == ref
ref = "tests.support.pinspect_support_module_4"
assert xobj.get_callable_from_line(fname, 100) == ref
###
# Tests for get_function_args()
###
class TestGetFunctionArgs(object):
"""Tests for get_function_args function."""
def test_all_positional_arguments(self): # noqa: D202
"""Test function when all arguments are positional arguments."""
def func(ppar1, ppar2, ppar3):
pass
obj = pexdoc.pinspect.get_function_args
assert obj(func) == ("ppar1", "ppar2", "ppar3")
def test_all_keyword_arguments(self): # noqa: D202
"""Test function when all arguments are keywords arguments."""
def func(kpar1=1, kpar2=2, kpar3=3):
pass
obj = pexdoc.pinspect.get_function_args
assert obj(func) == ("kpar1", "kpar2", "kpar3")
def test_positional_and_keyword_arguments(self): # noqa: D202
"""Test function when arguments are mix of positional and keywords arguments."""
def func(ppar1, ppar2, ppar3, kpar1=1, kpar2=2, kpar3=3, **kwargs):
pass
assert pexdoc.pinspect.get_function_args(func) == (
"ppar1",
"ppar2",
"ppar3",
"kpar1",
"kpar2",
"kpar3",
"**kwargs",
)
assert pexdoc.pinspect.get_function_args(func, no_varargs=True) == (
"ppar1",
"ppar2",
"ppar3",
"kpar1",
"kpar2",
"kpar3",
)
def test_no_arguments(self): # noqa: D202
"""Test function when there are no arguments passed."""
def func():
pass
assert pexdoc.pinspect.get_function_args(func) == ()
def test_no_self(self): # noqa: D202
"""Test function when there are no arguments passed."""
class MyClass(object):
def __init__(self, value, **kwargs):
pass
obj = partial(pexdoc.pinspect.get_function_args, MyClass.__init__)
assert obj() == ("self", "value", "**kwargs")
assert obj(no_self=True) == ("value", "**kwargs")
assert obj(no_self=True, no_varargs=True) == ("value",)
assert obj(no_varargs=True) == ("self", "value")
def test_nonzero(self):
"""Test __nonzero__() function."""
obj = pexdoc.pinspect.Callables()
assert not obj
obj.trace([modfile("pmisc")])
assert obj
|
swagner-de/irws_homeworks | refs/heads/master | word_embeddings/tfidf.py | 1 | import math
def compute_idf(docs):
idf = {}
for doc in docs:
for term in doc:
try:
idf[term] += 1
except KeyError:
idf[term] = 1
    # note: this stores the raw ratio N/df; classic IDF takes the log of it
    for k in idf:
        idf[k] = len(docs) / idf[k]
return idf
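# Example (assumed toy corpus): for docs = [['a', 'b'], ['a', 'c']],
# compute_idf(docs) returns {'a': 1.0, 'b': 2.0, 'c': 2.0} -- terms that
# occur in every document get the lowest weight.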
def compute_tf(doc):
tf = {}
for term in doc:
        # doc.count(term) scans the whole doc, so compute it once per term
        if term not in tf:
            tf[term] = doc.count(term)
return tf
def compute_tfidf(idf, doc):
tfidf = {}
tf = compute_tf(doc)
for k, v in tf.items():
tfidf[k] = v * idf[k]
return tfidf
def cosine(doc1, doc2):
scalar = 0
ldoc1 = 0
ldoc2 = 0
if len(doc1) != len(doc2):
raise ValueError('Vector sizes differ')
for i in range(len(doc1)):
ldoc1 += math.pow(doc1[i], 2)
ldoc2 += math.pow(doc2[i], 2)
scalar += doc1[i] * doc2[i]
ldoc1, ldoc2 = math.sqrt(ldoc1), math.sqrt(ldoc2)
    return scalar / (ldoc1 * ldoc2)
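# A minimal end-to-end sketch (illustrative; corpus and names assumed): build
# a shared vocabulary, expand the tf-idf dicts into aligned vectors, then
# compare two documents with cosine(). rank() below expects vectors aligned
# the same way.
def _demo_similarity():
    docs = [['cat', 'sat', 'mat'], ['cat', 'ran'], ['dog', 'ran']]
    idf = compute_idf(docs)
    vocab = sorted(idf)
    # missing terms contribute 0 to a document's vector
    vectors = [[compute_tfidf(idf, doc).get(term, 0) for term in vocab]
               for doc in docs]
    return cosine(vectors[0], vectors[1])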
def rank(query, docs):
results = []
i = 0
for doc in docs:
results.append(
{
'cosine' : cosine(query, doc),
'doc' : doc,
'index' : str(i)
}
)
i += 1
return sorted(results, key=lambda x: x['cosine'], reverse=True) |
loco-odoo/localizacion_co | refs/heads/master | openerp/addons/web/doc/_themes/flask_theme_support.py | 2228 | # flasky extensions. flasky pygments style based on tango style
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
class FlaskyStyle(Style):
background_color = "#f8f8f8"
default_style = ""
styles = {
# No corresponding class for the following:
#Text: "", # class: ''
Whitespace: "underline #f8f8f8", # class: 'w'
Error: "#a40000 border:#ef2929", # class: 'err'
Other: "#000000", # class 'x'
Comment: "italic #8f5902", # class: 'c'
Comment.Preproc: "noitalic", # class: 'cp'
Keyword: "bold #004461", # class: 'k'
Keyword.Constant: "bold #004461", # class: 'kc'
Keyword.Declaration: "bold #004461", # class: 'kd'
Keyword.Namespace: "bold #004461", # class: 'kn'
Keyword.Pseudo: "bold #004461", # class: 'kp'
Keyword.Reserved: "bold #004461", # class: 'kr'
Keyword.Type: "bold #004461", # class: 'kt'
Operator: "#582800", # class: 'o'
Operator.Word: "bold #004461", # class: 'ow' - like keywords
Punctuation: "bold #000000", # class: 'p'
# because special names such as Name.Class, Name.Function, etc.
# are not recognized as such later in the parsing, we choose them
# to look the same as ordinary variables.
Name: "#000000", # class: 'n'
Name.Attribute: "#c4a000", # class: 'na' - to be revised
Name.Builtin: "#004461", # class: 'nb'
Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
Name.Class: "#000000", # class: 'nc' - to be revised
Name.Constant: "#000000", # class: 'no' - to be revised
Name.Decorator: "#888", # class: 'nd' - to be revised
Name.Entity: "#ce5c00", # class: 'ni'
Name.Exception: "bold #cc0000", # class: 'ne'
Name.Function: "#000000", # class: 'nf'
Name.Property: "#000000", # class: 'py'
Name.Label: "#f57900", # class: 'nl'
Name.Namespace: "#000000", # class: 'nn' - to be revised
Name.Other: "#000000", # class: 'nx'
Name.Tag: "bold #004461", # class: 'nt' - like a keyword
Name.Variable: "#000000", # class: 'nv' - to be revised
Name.Variable.Class: "#000000", # class: 'vc' - to be revised
Name.Variable.Global: "#000000", # class: 'vg' - to be revised
Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
Number: "#990000", # class: 'm'
Literal: "#000000", # class: 'l'
Literal.Date: "#000000", # class: 'ld'
String: "#4e9a06", # class: 's'
String.Backtick: "#4e9a06", # class: 'sb'
String.Char: "#4e9a06", # class: 'sc'
String.Doc: "italic #8f5902", # class: 'sd' - like a comment
String.Double: "#4e9a06", # class: 's2'
String.Escape: "#4e9a06", # class: 'se'
String.Heredoc: "#4e9a06", # class: 'sh'
String.Interpol: "#4e9a06", # class: 'si'
String.Other: "#4e9a06", # class: 'sx'
String.Regex: "#4e9a06", # class: 'sr'
String.Single: "#4e9a06", # class: 's1'
String.Symbol: "#4e9a06", # class: 'ss'
Generic: "#000000", # class: 'g'
Generic.Deleted: "#a40000", # class: 'gd'
Generic.Emph: "italic #000000", # class: 'ge'
Generic.Error: "#ef2929", # class: 'gr'
Generic.Heading: "bold #000080", # class: 'gh'
Generic.Inserted: "#00A000", # class: 'gi'
Generic.Output: "#888", # class: 'go'
Generic.Prompt: "#745334", # class: 'gp'
Generic.Strong: "bold #000000", # class: 'gs'
Generic.Subheading: "bold #800080", # class: 'gu'
Generic.Traceback: "bold #a40000", # class: 'gt'
}
|
oudalab/phyllo | refs/heads/master | phyllo/extractors/lactantiusDB.py | 1 | import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
# this is mostly good
# books 2-7 are missing from the Latin Library
def getBooks(soup):
siteURL = 'http://www.thelatinlibrary.com'
textsURL = []
# get links to books in the collection
for a in soup.find_all('a', href=True):
        link = a['href']
        textsURL.append("{}/{}".format(siteURL, link))
    # remove navigation URLs that are not part of this collection; removing
    # them one by one avoids a ValueError when a URL is absent or repeated
    for navURL in ("http://www.thelatinlibrary.com/index.html",
                   "http://www.thelatinlibrary.com/classics.html",
                   "http://www.thelatinlibrary.com/christian.html"):
        while navURL in textsURL:
            textsURL.remove(navURL)
logger.info("\n".join(textsURL))
return textsURL
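# Note on the parsing in main() below: verse boundaries inside a chapter are
# marked by bare numerals in the source text, so the code splits on them with
# re.split('[0-9]+', textstr). For example (assumed input),
# re.split('[0-9]+', 'alpha 2 beta 3 gamma') returns ['alpha ', ' beta ', ' gamma'].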
def main():
# The collection URL below.
collURL = 'http://www.thelatinlibrary.com/lactantius.html'
collOpen = urllib.request.urlopen(collURL)
collSOUP = BeautifulSoup(collOpen, 'html5lib')
author = collSOUP.title.string.strip()
colltitle = collSOUP.title.string.strip()
date = collSOUP.span.string.replace('(', '').replace(')', '').replace(u"\u2013", '-').strip()
textsURL = getBooks(collSOUP)
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author = 'Lactantius'")
for url in textsURL:
openurl = urllib.request.urlopen(url)
textsoup = BeautifulSoup(openurl, 'html5lib')
title = textsoup.title.string.split(":")[1].strip()
print(title)
chapter = -1
verse = 0
if title.startswith("Divinarum"):
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
text = p.get_text()
text = text.strip()
verses = []
if re.match("LIBER PRIMUS", text):
continue
chapter = text.split(".")[0]
text = text.replace(chapter + ".", '')
verses.append(text)
verse = 0
for v in verses:
if v.startswith('Lactantius'):
continue
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
else:
chapter = -1
textstr = ''
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
text = p.get_text()
text = text.strip()
if re.match("\[[0-9]+\]", text):
# this is a chapter heading
textstr = textstr.replace("[", '').replace("]", '').strip()
verses = re.split('[0-9]+', textstr)
chapter += 1
print(chapter)
for v in verses:
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
textstr = ''
verse = 0
textstr = textstr + " " + p.get_text()
textstr = textstr.replace("[", '').replace("]", '').strip()
verses = re.split('[0-9]+', textstr)
chapter += 1
for v in verses:
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
if __name__ == '__main__':
main()
|
dezynetechnologies/odoo | refs/heads/8.0 | openerp/addons/base/tests/__init__.py | 286 | import test_acl
import test_api
import test_base
import test_basecase
import test_db_cursor
import test_expression
import test_func
import test_ir_actions
import test_ir_attachment
import test_ir_filters
import test_ir_sequence
import test_ir_values
import test_mail
import test_menu
import test_orm
import test_osv
import test_qweb
import test_res_config
import test_res_lang
import test_search
import test_translate
#import test_uninstall
import test_view_validation
import test_views
import test_xmlrpc
|
FrankHeimes/xbmc | refs/heads/master | addons/service.xbmc.versioncheck/lib/aptdeamonhandler.py | 177 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Team-XBMC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import xbmc
from common import *
try:
    import apt
    from aptdaemon import client
    from aptdaemon import errors
except ImportError:
    log('python apt import error')
class AptdeamonHandler:
    def __init__(self):
        self.aptclient = client.AptClient()
        self._pwd = ""  # password cache used by _getpassword()
def _check_versions(self, package):
if not self._update_cache():
return False, False
try:
trans = self.aptclient.upgrade_packages([package])
trans.simulate(reply_handler=self._apttransstarted, error_handler=self._apterrorhandler)
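            # trans.packages is assumed to follow aptdaemon's per-action layout
            # (install, reinstall, remove, purge, upgrade, downgrade); index 4
            # is therefore the list of packages the upgrade would touch.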
pkg = trans.packages[4][0]
if pkg == package:
                cache = apt.Cache()
cache.open(None)
cache.upgrade()
if cache[pkg].installed:
return cache[pkg].installed.version, cache[pkg].candidate.version
return False, False
except Exception as error:
log("Exception while checking versions: %s" %error)
return False, False
def _update_cache(self):
try:
if self.aptclient.update_cache(wait=True) == "exit-success":
return True
else:
return False
except errors.NotAuthorizedError:
log("You are not allowed to update the cache")
return False
def check_upgrade_available(self, package):
'''returns True if newer package is available in the repositories'''
installed, candidate = self._check_versions(package)
if installed and candidate:
if installed != candidate:
log("Version installed %s" %installed)
log("Version available %s" %candidate)
return True
else:
log("Already on newest version")
elif not installed:
log("No installed package found")
return False
else:
return False
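    # Usage sketch (the package name is illustrative):
    #   handler = AptdeamonHandler()
    #   if handler.check_upgrade_available("xbmc"):
    #       handler.upgrade_package("xbmc")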
def upgrade_package(self, package):
try:
log("Installing new version")
if self.aptclient.upgrade_packages([package], wait=True) == "exit-success":
log("Upgrade successful")
return True
except Exception as error:
log("Exception during upgrade: %s" %error)
return False
def upgrade_system(self):
try:
log("Upgrading system")
if self.aptclient.upgrade_system(wait=True) == "exit-success":
return True
except Exception as error:
log("Exception during system upgrade: %s" %error)
return False
def _getpassword(self):
if len(self._pwd) == 0:
self._pwd = get_password_from_user()
return self._pwd
def _apttransstarted(self):
pass
def _apterrorhandler(self, error):
log("Apt Error %s" %error) |
AlexanderSavelyev/rdkit | refs/heads/master | rdkit/test_list.py | 6 |
tests=[
("python","test_list.py",{'dir':'ML'}),
("python","test_list.py",{'dir':'Chem'}),
("python","test_list.py",{'dir':'DataStructs'}),
("python","test_list.py",{'dir':'Dbase'}),
("python","test_list.py",{'dir':'SimDivFilters'}),
("python","test_list.py",{'dir':'VLib'}),
]
longTests=[
]
if __name__=='__main__':
import sys
from rdkit import TestRunner
failed,tests = TestRunner.RunScript('test_list.py',0,1)
sys.exit(len(failed))
|
ZENGXH/scikit-learn | refs/heads/master | sklearn/metrics/cluster/bicluster.py | 359 | from __future__ import division
import numpy as np
from sklearn.utils.linear_assignment_ import linear_assignment
from sklearn.utils.validation import check_consistent_length, check_array
__all__ = ["consensus_score"]
def _check_rows_and_columns(a, b):
"""Unpacks the row and column arrays and checks their shape."""
check_consistent_length(*a)
check_consistent_length(*b)
checks = lambda x: check_array(x, ensure_2d=False)
a_rows, a_cols = map(checks, a)
b_rows, b_cols = map(checks, b)
return a_rows, a_cols, b_rows, b_cols
def _jaccard(a_rows, a_cols, b_rows, b_cols):
"""Jaccard coefficient on the elements of the two biclusters."""
intersection = ((a_rows * b_rows).sum() *
(a_cols * b_cols).sum())
a_size = a_rows.sum() * a_cols.sum()
b_size = b_rows.sum() * b_cols.sum()
return intersection / (a_size + b_size - intersection)
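# Worked example for _jaccard (a sketch): identical biclusters give
# intersection == a_size == b_size, so the ratio is 1.0; biclusters sharing
# no rows or no columns give intersection == 0, so the ratio is 0.0.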
def _pairwise_similarity(a, b, similarity):
"""Computes pairwise similarity matrix.
    result[i, j] is the similarity of a's bicluster i and b's bicluster j,
    as measured by the supplied similarity function (e.g. the Jaccard
    coefficient).
"""
a_rows, a_cols, b_rows, b_cols = _check_rows_and_columns(a, b)
n_a = a_rows.shape[0]
n_b = b_rows.shape[0]
result = np.array(list(list(similarity(a_rows[i], a_cols[i],
b_rows[j], b_cols[j])
for j in range(n_b))
for i in range(n_a)))
return result
def consensus_score(a, b, similarity="jaccard"):
"""The similarity of two sets of biclusters.
Similarity between individual biclusters is computed. Then the
best matching between sets is found using the Hungarian algorithm.
The final score is the sum of similarities divided by the size of
the larger set.
Read more in the :ref:`User Guide <biclustering>`.
Parameters
----------
a : (rows, columns)
Tuple of row and column indicators for a set of biclusters.
b : (rows, columns)
Another set of biclusters like ``a``.
similarity : string or function, optional, default: "jaccard"
May be the string "jaccard" to use the Jaccard coefficient, or
any function that takes four arguments, each of which is a 1d
indicator vector: (a_rows, a_columns, b_rows, b_columns).
References
----------
    * Hochreiter, Bodenhofer, et al., 2010. `FABIA: factor analysis
for bicluster acquisition
<https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2881408/>`__.
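    Examples
    --------
    A minimal doctest-style sketch: two identical sets of two disjoint
    biclusters agree perfectly, so the default Jaccard score is 1.0.
    >>> import numpy as np
    >>> rows = np.array([[True, False], [False, True]])
    >>> cols = np.array([[True, False], [False, True]])
    >>> consensus_score((rows, cols), (rows, cols))
    1.0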
"""
if similarity == "jaccard":
similarity = _jaccard
matrix = _pairwise_similarity(a, b, similarity)
indices = linear_assignment(1. - matrix)
n_a = len(a[0])
n_b = len(b[0])
return matrix[indices[:, 0], indices[:, 1]].sum() / max(n_a, n_b)
|
lowfatcomputing/pacman | refs/heads/master | test/pacman/tests/sync010.py | 28 | self.description = "Install a package from a sync db with cascaded dependencies"
sp1 = pmpkg("dummy", "1.0-2")
sp1.files = ["bin/dummy",
"usr/man/man1/dummy.1"]
sp1.depends = ["dep1"]
sp2 = pmpkg("dep1")
sp2.files = ["bin/dep1"]
sp2.depends = ["dep2"]
sp3 = pmpkg("dep2")
sp3.files = ["bin/dep2"]
for p in sp1, sp2, sp3:
    self.addpkg2db("sync", p)
self.args = "-S %s" % sp1.name
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_VERSION=dummy|1.0-2")
self.addrule("PKG_DEPENDS=dummy|dep1")
for p in sp1, sp2, sp3:
self.addrule("PKG_EXIST=%s" % p.name)
for f in p.files:
self.addrule("FILE_EXIST=%s" % f)
self.addrule("PKG_DEPENDS=dep1|dep2")
|
JamesShaeffer/QGIS | refs/heads/master | python/plugins/processing/algs/grass7/ext/r_li_dominance_ascii.py | 45 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_dominance_ascii.py
-----------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context, feedback):
configFile(alg, parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
moveOutputTxtFile(alg, parameters, context)
|
theshadowx/enigma2 | refs/heads/master | lib/python/SIFTeam/SoftwareManager/Rank.py | 2 | from enigma import *
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.Button import Button
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.List import List
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN
from Tools.LoadPixmap import LoadPixmap
from SIFTeam.Extra.SAPCL import SAPCL
from SIFTeam.Extra.ExtraActionBox import ExtraActionBox
def RankEntry(rank, description):
rank = int(round(rank, 0))
star = LoadPixmap(cached = True, path = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/sifteam_others/star.png"))
star_disabled = LoadPixmap(cached = True, path = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/sifteam_others/star_disabled.png"))
if rank == 1:
return (star, star_disabled, star_disabled, star_disabled, star_disabled, description)
elif rank == 2:
return (star, star, star_disabled, star_disabled, star_disabled, description)
elif rank == 3:
return (star, star, star, star_disabled, star_disabled, description)
elif rank == 4:
return (star, star, star, star, star_disabled, description)
elif rank == 5:
return (star, star, star, star, star, description)
return (star_disabled, star_disabled, star_disabled, star_disabled, star_disabled, description)
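# Example (a sketch): RankEntry(3.4, "Good") rounds to 3 stars and returns
# (star, star, star, star_disabled, star_disabled, "Good") for the list row.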
class SMRank(Screen):
def __init__(self, session, package):
Screen.__init__(self, session)
self.session = session
self.package = package
self.cachelist = []
self['list'] = List([])
self["text"] = Label("Rank the application %s" % package["name"])
self["key_green"] = Button()
self["key_red"] = Button("")
self["key_blue"] = Button("")
self["key_yellow"] = Button()
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"cancel": self.quit,
"ok": self.ok
}, -2)
self.renderList()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle("Software Manager - Rank - %s" % self.package["name"])
def renderList(self):
self.cachelist = []
self.cachelist.append(RankEntry(0.0, "Really bad"))
self.cachelist.append(RankEntry(1.0, "Can do better"))
self.cachelist.append(RankEntry(2.0, "Sufficient"))
self.cachelist.append(RankEntry(3.0, "Good"))
self.cachelist.append(RankEntry(4.0, "Very good"))
self.cachelist.append(RankEntry(5.0, "Excellent"))
self["list"].setList(self.cachelist)
def rank(self):
id = -1
try:
id = int(self.package["id"])
except Exception, e:
pass
api = SAPCL()
return api.rank(id, self.index)
def rankCallback(self, result):
if result["result"]:
self.session.open(MessageBox, _("Thanks for your rank!"), MessageBox.TYPE_INFO, 3)
else:
self.session.open(MessageBox, _(result["message"]), MessageBox.TYPE_ERROR)
self.quit()
def ok(self):
if len(self.cachelist) == 0:
return
index = self["list"].getIndex()
        if index is None:
index = 0
self.index = index
self.session.openWithCallback(self.rankCallback, ExtraActionBox, _("Ranking %s...") % self.package["name"], "Software Manager", self.rank)
def quit(self):
self.close() |
Zord13appdesa/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/queue.py | 51 | """A multi-producer, multi-consumer queue."""
from time import time as _time
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
from collections import deque
import heapq
__all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue']
class Empty(Exception):
"Exception raised by Queue.get(block=0)/get_nowait()."
pass
class Full(Exception):
"Exception raised by Queue.put(block=0)/put_nowait()."
pass
class Queue:
"""Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
"""
def __init__(self, maxsize=0):
self.maxsize = maxsize
self._init(maxsize)
# mutex must be held whenever the queue is mutating. All methods
# that acquire mutex must release it before returning. mutex
# is shared between the three conditions, so acquiring and
# releasing the conditions also acquires and releases mutex.
self.mutex = _threading.Lock()
# Notify not_empty whenever an item is added to the queue; a
# thread waiting to get is notified then.
self.not_empty = _threading.Condition(self.mutex)
# Notify not_full whenever an item is removed from the queue;
# a thread waiting to put is notified then.
self.not_full = _threading.Condition(self.mutex)
# Notify all_tasks_done whenever the number of unfinished tasks
# drops to zero; thread waiting to join() is notified to resume
self.all_tasks_done = _threading.Condition(self.mutex)
self.unfinished_tasks = 0
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by Queue consumer threads. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items
have been processed (meaning that a task_done() call was received
for every item that had been put() into the queue).
Raises a ValueError if called more times than there were items
placed in the queue.
"""
self.all_tasks_done.acquire()
try:
unfinished = self.unfinished_tasks - 1
if unfinished <= 0:
if unfinished < 0:
raise ValueError('task_done() called too many times')
self.all_tasks_done.notify_all()
self.unfinished_tasks = unfinished
finally:
self.all_tasks_done.release()
def join(self):
"""Blocks until all items in the Queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
self.all_tasks_done.acquire()
try:
while self.unfinished_tasks:
self.all_tasks_done.wait()
finally:
self.all_tasks_done.release()
def qsize(self):
"""Return the approximate size of the queue (not reliable!)."""
self.mutex.acquire()
n = self._qsize()
self.mutex.release()
return n
def empty(self):
"""Return True if the queue is empty, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() == 0
as a direct substitute, but be aware that either approach risks a race
condition where a queue can grow before the result of empty() or
qsize() can be used.
To create code that needs to wait for all queued tasks to be
completed, the preferred technique is to use the join() method.
"""
self.mutex.acquire()
n = not self._qsize()
self.mutex.release()
return n
def full(self):
"""Return True if the queue is full, False otherwise (not reliable!).
This method is likely to be removed at some point. Use qsize() >= n
as a direct substitute, but be aware that either approach risks a race
condition where a queue can shrink before the result of full() or
qsize() can be used.
"""
self.mutex.acquire()
n = 0 < self.maxsize <= self._qsize()
self.mutex.release()
return n
def put(self, item, block=True, timeout=None):
"""Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a positive number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
"""
self.not_full.acquire()
try:
if self.maxsize > 0:
if not block:
if self._qsize() >= self.maxsize:
raise Full
elif timeout is None:
while self._qsize() >= self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
endtime = _time() + timeout
while self._qsize() >= self.maxsize:
remaining = endtime - _time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
finally:
self.not_full.release()
def put_nowait(self, item):
"""Put an item into the queue without blocking.
Only enqueue the item if a free slot is immediately available.
Otherwise raise the Full exception.
"""
return self.put(item, False)
def get(self, block=True, timeout=None):
"""Remove and return an item from the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until an item is available. If 'timeout' is
a positive number, it blocks at most 'timeout' seconds and raises
the Empty exception if no item was available within that time.
Otherwise ('block' is false), return an item if one is immediately
available, else raise the Empty exception ('timeout' is ignored
in that case).
"""
self.not_empty.acquire()
try:
if not block:
if not self._qsize():
raise Empty
elif timeout is None:
while not self._qsize():
self.not_empty.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
endtime = _time() + timeout
while not self._qsize():
remaining = endtime - _time()
if remaining <= 0.0:
raise Empty
self.not_empty.wait(remaining)
item = self._get()
self.not_full.notify()
return item
finally:
self.not_empty.release()
def get_nowait(self):
"""Remove and return an item from the queue without blocking.
Only get an item if one is immediately available. Otherwise
raise the Empty exception.
"""
return self.get(False)
# Override these methods to implement other queue organizations
# (e.g. stack or priority queue).
# These will only be called with appropriate locks held
# Initialize the queue representation
def _init(self, maxsize):
self.queue = deque()
def _qsize(self, len=len):
return len(self.queue)
# Put a new item in the queue
def _put(self, item):
self.queue.append(item)
# Get an item from the queue
def _get(self):
return self.queue.popleft()
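# A short usage sketch of the FIFO behaviour above:
#   q = Queue(maxsize=2)
#   q.put('a'); q.put('b')    # a third non-blocking put would raise Full
#   q.get()                   # -> 'a' (oldest item first)
#   q.task_done()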
class PriorityQueue(Queue):
'''Variant of Queue that retrieves open entries in priority order (lowest first).
Entries are typically tuples of the form: (priority number, data).
'''
def _init(self, maxsize):
self.queue = []
def _qsize(self, len=len):
return len(self.queue)
def _put(self, item, heappush=heapq.heappush):
heappush(self.queue, item)
def _get(self, heappop=heapq.heappop):
return heappop(self.queue)
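# PriorityQueue sketch: entries compare as tuples, so the lowest priority
# number is retrieved first:
#   pq = PriorityQueue()
#   pq.put((2, 'low')); pq.put((1, 'high'))
#   pq.get()    # -> (1, 'high')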
class LifoQueue(Queue):
'''Variant of Queue that retrieves most recently added entries first.'''
def _init(self, maxsize):
self.queue = []
def _qsize(self, len=len):
return len(self.queue)
def _put(self, item):
self.queue.append(item)
def _get(self):
return self.queue.pop()
|
cedricbonhomme/Stegano | refs/heads/master | bin/statistics.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Stegano - Stegano is a pure Python steganography module.
# Copyright (C) 2010-2021 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://github.com/cedricbonhomme/Stegano
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
__author__ = "Cédric Bonhomme"
__version__ = "$Revision: 0.1 $"
__date__ = "$Date: 2016/08/26 $"
__revision__ = "$Date: 2016/08/26 $"
__license__ = "GPLv3"
from PIL import Image
import argparse
try:
    from stegano.steganalysis import statistics
except ImportError:
    print("Install Stegano: sudo pip install Stegano")
def main():
    parser = argparse.ArgumentParser(prog='stegano-steganalysis-statistics')
    parser.add_argument("-i", "--input", dest="input_image_file",
                        help="Input image file")
    parser.add_argument("-o", "--output", dest="output_image_file",
                        help="Output image file")
arguments = parser.parse_args()
input_image_file = Image.open(arguments.input_image_file)
output_image = statistics.steganalyse(input_image_file)
output_image.save(arguments.output_image_file)
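# Entry-point guard (assumed; the other Stegano command-line tools end this way):
if __name__ == "__main__":
    main()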
|
yasoob/PythonRSSReader | refs/heads/master | venv/lib/python2.7/dist-packages/samba/dbchecker.py | 1 | # Samba4 AD database checker
#
# Copyright (C) Andrew Tridgell 2011
# Copyright (C) Matthieu Patou <[email protected]> 2011
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import ldb
import samba
import time
from samba import dsdb
from samba import common
from samba.dcerpc import misc
from samba.ndr import ndr_unpack, ndr_pack
from samba.dcerpc import drsblobs
from samba.common import dsdb_Dn
from samba.dcerpc import security
from samba.descriptor import get_wellknown_sds, get_diff_sds
from samba.auth import system_session, admin_session
class dbcheck(object):
"""check a SAM database for errors"""
def __init__(self, samdb, samdb_schema=None, verbose=False, fix=False,
yes=False, quiet=False, in_transaction=False,
reset_well_known_acls=False):
self.samdb = samdb
self.dict_oid_name = None
self.samdb_schema = (samdb_schema or samdb)
self.verbose = verbose
self.fix = fix
self.yes = yes
self.quiet = quiet
self.remove_all_unknown_attributes = False
self.remove_all_empty_attributes = False
self.fix_all_normalisation = False
self.fix_all_DN_GUIDs = False
self.fix_all_binary_dn = False
self.remove_all_deleted_DN_links = False
self.fix_all_target_mismatch = False
self.fix_all_metadata = False
self.fix_time_metadata = False
self.fix_all_missing_backlinks = False
self.fix_all_orphaned_backlinks = False
self.fix_rmd_flags = False
self.fix_ntsecuritydescriptor = False
self.fix_ntsecuritydescriptor_owner_group = False
self.seize_fsmo_role = False
self.move_to_lost_and_found = False
self.fix_instancetype = False
self.fix_replmetadata_zero_invocationid = False
self.fix_deleted_deleted_objects = False
self.reset_well_known_acls = reset_well_known_acls
self.reset_all_well_known_acls = False
self.in_transaction = in_transaction
self.infrastructure_dn = ldb.Dn(samdb, "CN=Infrastructure," + samdb.domain_dn())
self.naming_dn = ldb.Dn(samdb, "CN=Partitions,%s" % samdb.get_config_basedn())
self.schema_dn = samdb.get_schema_basedn()
self.rid_dn = ldb.Dn(samdb, "CN=RID Manager$,CN=System," + samdb.domain_dn())
self.ntds_dsa = ldb.Dn(samdb, samdb.get_dsServiceName())
self.class_schemaIDGUID = {}
self.wellknown_sds = get_wellknown_sds(self.samdb)
self.name_map = {}
try:
res = samdb.search(base="CN=DnsAdmins,CN=Users,%s" % samdb.domain_dn(), scope=ldb.SCOPE_BASE,
attrs=["objectSid"])
dnsadmins_sid = ndr_unpack(security.dom_sid, res[0]["objectSid"][0])
self.name_map['DnsAdmins'] = str(dnsadmins_sid)
except ldb.LdbError, (enum, estr):
if enum != ldb.ERR_NO_SUCH_OBJECT:
raise
pass
self.system_session_info = system_session()
self.admin_session_info = admin_session(None, samdb.get_domain_sid())
res = self.samdb.search(base=self.ntds_dsa, scope=ldb.SCOPE_BASE, attrs=['msDS-hasMasterNCs', 'hasMasterNCs'])
if "msDS-hasMasterNCs" in res[0]:
self.write_ncs = res[0]["msDS-hasMasterNCs"]
else:
# If the Forest Level is less than 2003 then there is no
# msDS-hasMasterNCs, so we fall back to hasMasterNCs
# no need to merge as all the NCs that are in hasMasterNCs must
# also be in msDS-hasMasterNCs (but not the opposite)
if "hasMasterNCs" in res[0]:
self.write_ncs = res[0]["hasMasterNCs"]
else:
self.write_ncs = None
res = self.samdb.search(base="", scope=ldb.SCOPE_BASE, attrs=['namingContexts'])
try:
ncs = res[0]["namingContexts"]
self.deleted_objects_containers = []
for nc in ncs:
try:
dn = self.samdb.get_wellknown_dn(ldb.Dn(self.samdb, nc),
dsdb.DS_GUID_DELETED_OBJECTS_CONTAINER)
self.deleted_objects_containers.append(dn)
except KeyError:
pass
except KeyError:
pass
except IndexError:
pass
def check_database(self, DN=None, scope=ldb.SCOPE_SUBTREE, controls=[], attrs=['*']):
'''perform a database check, returning the number of errors found'''
res = self.samdb.search(base=DN, scope=scope, attrs=['dn'], controls=controls)
self.report('Checking %u objects' % len(res))
error_count = 0
for object in res:
error_count += self.check_object(object.dn, attrs=attrs)
if DN is None:
error_count += self.check_rootdse()
if error_count != 0 and not self.fix:
self.report("Please use --fix to fix these errors")
self.report('Checked %u objects (%u errors)' % (len(res), error_count))
return error_count
def report(self, msg):
'''print a message unless quiet is set'''
if not self.quiet:
print(msg)
def confirm(self, msg, allow_all=False, forced=False):
'''confirm a change'''
if not self.fix:
return False
if self.quiet:
return self.yes
if self.yes:
forced = True
return common.confirm(msg, forced=forced, allow_all=allow_all)
################################################################
# a local confirm function with support for 'all'
def confirm_all(self, msg, all_attr):
'''confirm a change with support for "all" '''
if not self.fix:
return False
if self.quiet:
return self.yes
if getattr(self, all_attr) == 'NONE':
return False
if getattr(self, all_attr) == 'ALL':
forced = True
else:
forced = self.yes
c = common.confirm(msg, forced=forced, allow_all=True)
if c == 'ALL':
setattr(self, all_attr, 'ALL')
return True
if c == 'NONE':
setattr(self, all_attr, 'NONE')
return False
return c
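    # Behaviour sketch: answering ALL (or NONE) to a confirm_all() prompt sets
    # the named *_all attribute, so every later prompt sharing that attribute
    # is auto-answered True (or False) without asking again.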
def do_modify(self, m, controls, msg, validate=True):
'''perform a modify with optional verbose output'''
if self.verbose:
self.report(self.samdb.write_ldif(m, ldb.CHANGETYPE_MODIFY))
try:
controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.modify(m, controls=controls, validate=validate)
except Exception, err:
self.report("%s : %s" % (msg, err))
return False
return True
def do_rename(self, from_dn, to_rdn, to_base, controls, msg):
        '''perform a rename with optional verbose output'''
if self.verbose:
self.report("""dn: %s
changeType: modrdn
newrdn: %s
deleteOldRdn: 1
newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
try:
to_dn = to_rdn + to_base
controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.rename(from_dn, to_dn, controls=controls)
except Exception, err:
self.report("%s : %s" % (msg, err))
return False
return True
def err_empty_attribute(self, dn, attrname):
'''fix empty attributes'''
self.report("ERROR: Empty attribute %s in %s" % (attrname, dn))
if not self.confirm_all('Remove empty attribute %s from %s?' % (attrname, dn), 'remove_all_empty_attributes'):
self.report("Not fixing empty attribute %s" % attrname)
return
m = ldb.Message()
m.dn = dn
m[attrname] = ldb.MessageElement('', ldb.FLAG_MOD_DELETE, attrname)
if self.do_modify(m, ["relax:0", "show_recycled:1"],
"Failed to remove empty attribute %s" % attrname, validate=False):
self.report("Removed empty attribute %s" % attrname)
def err_normalise_mismatch(self, dn, attrname, values):
'''fix attribute normalisation errors'''
self.report("ERROR: Normalisation error for attribute %s in %s" % (attrname, dn))
mod_list = []
for val in values:
normalised = self.samdb.dsdb_normalise_attributes(
self.samdb_schema, attrname, [val])
if len(normalised) != 1:
self.report("Unable to normalise value '%s'" % val)
mod_list.append((val, ''))
elif (normalised[0] != val):
self.report("value '%s' should be '%s'" % (val, normalised[0]))
mod_list.append((val, normalised[0]))
if not self.confirm_all('Fix normalisation for %s from %s?' % (attrname, dn), 'fix_all_normalisation'):
self.report("Not fixing attribute %s" % attrname)
return
m = ldb.Message()
m.dn = dn
for i in range(0, len(mod_list)):
(val, nval) = mod_list[i]
m['value_%u' % i] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
if nval != '':
m['normv_%u' % i] = ldb.MessageElement(nval, ldb.FLAG_MOD_ADD,
attrname)
if self.do_modify(m, ["relax:0", "show_recycled:1"],
"Failed to normalise attribute %s" % attrname,
validate=False):
self.report("Normalised attribute %s" % attrname)
def err_normalise_mismatch_replace(self, dn, attrname, values):
'''fix attribute normalisation errors'''
normalised = self.samdb.dsdb_normalise_attributes(self.samdb_schema, attrname, values)
self.report("ERROR: Normalisation error for attribute '%s' in '%s'" % (attrname, dn))
self.report("Values/Order of values do/does not match: %s/%s!" % (values, list(normalised)))
if list(normalised) == values:
return
if not self.confirm_all("Fix normalisation for '%s' from '%s'?" % (attrname, dn), 'fix_all_normalisation'):
self.report("Not fixing attribute '%s'" % attrname)
return
m = ldb.Message()
m.dn = dn
m[attrname] = ldb.MessageElement(normalised, ldb.FLAG_MOD_REPLACE, attrname)
if self.do_modify(m, ["relax:0", "show_recycled:1"],
"Failed to normalise attribute %s" % attrname,
validate=False):
self.report("Normalised attribute %s" % attrname)
def is_deleted_objects_dn(self, dsdb_dn):
'''see if a dsdb_Dn is the special Deleted Objects DN'''
return dsdb_dn.prefix == "B:32:%s:" % dsdb.DS_GUID_DELETED_OBJECTS_CONTAINER
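    # Binary-DN sketch (the GUID value is the well-known Deleted Objects one):
    #   B:32:18e2ea80684f11d2b9aa00c04f79f805:CN=Deleted Objects,DC=example,DC=com
    # i.e. a "B:<hex-digit-count>:<hex-blob>:" prefix followed by the plain DN.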
def err_deleted_dn(self, dn, attrname, val, dsdb_dn, correct_dn):
"""handle a DN pointing to a deleted object"""
self.report("ERROR: target DN is deleted for %s in object %s - %s" % (attrname, dn, val))
self.report("Target GUID points at deleted DN %s" % correct_dn)
if not self.confirm_all('Remove DN link?', 'remove_all_deleted_DN_links'):
self.report("Not removing")
return
m = ldb.Message()
m.dn = dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
if self.do_modify(m, ["show_recycled:1", "local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK],
"Failed to remove deleted DN attribute %s" % attrname):
self.report("Removed deleted DN on attribute %s" % attrname)
def err_missing_dn_GUID(self, dn, attrname, val, dsdb_dn):
"""handle a missing target DN (both GUID and DN string form are missing)"""
        # check if it's a backlink
linkID = self.samdb_schema.get_linkId_from_lDAPDisplayName(attrname)
if (linkID & 1 == 0) and str(dsdb_dn).find('\\0ADEL') == -1:
self.report("Not removing dangling forward link")
return
self.err_deleted_dn(dn, attrname, val, dsdb_dn, dsdb_dn)
def err_incorrect_dn_GUID(self, dn, attrname, val, dsdb_dn, errstr):
"""handle a missing GUID extended DN component"""
self.report("ERROR: %s component for %s in object %s - %s" % (errstr, attrname, dn, val))
controls=["extended_dn:1:1", "show_recycled:1"]
try:
res = self.samdb.search(base=str(dsdb_dn.dn), scope=ldb.SCOPE_BASE,
attrs=[], controls=controls)
except ldb.LdbError, (enum, estr):
self.report("unable to find object for DN %s - (%s)" % (dsdb_dn.dn, estr))
self.err_missing_dn_GUID(dn, attrname, val, dsdb_dn)
return
if len(res) == 0:
self.report("unable to find object for DN %s" % dsdb_dn.dn)
self.err_missing_dn_GUID(dn, attrname, val, dsdb_dn)
return
dsdb_dn.dn = res[0].dn
if not self.confirm_all('Change DN to %s?' % str(dsdb_dn), 'fix_all_DN_GUIDs'):
self.report("Not fixing %s" % errstr)
return
m = ldb.Message()
m.dn = dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
m['new_value'] = ldb.MessageElement(str(dsdb_dn), ldb.FLAG_MOD_ADD, attrname)
if self.do_modify(m, ["show_recycled:1"],
"Failed to fix %s on attribute %s" % (errstr, attrname)):
self.report("Fixed %s on attribute %s" % (errstr, attrname))
def err_incorrect_binary_dn(self, dn, attrname, val, dsdb_dn, errstr):
"""handle an incorrect binary DN component"""
self.report("ERROR: %s binary component for %s in object %s - %s" % (errstr, attrname, dn, val))
if not self.confirm_all('Change DN to %s?' % str(dsdb_dn), 'fix_all_binary_dn'):
self.report("Not fixing %s" % errstr)
return
m = ldb.Message()
m.dn = dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
m['new_value'] = ldb.MessageElement(str(dsdb_dn), ldb.FLAG_MOD_ADD, attrname)
if self.do_modify(m, ["show_recycled:1"],
"Failed to fix %s on attribute %s" % (errstr, attrname)):
self.report("Fixed %s on attribute %s" % (errstr, attrname))
def err_dn_target_mismatch(self, dn, attrname, val, dsdb_dn, correct_dn, errstr):
"""handle a DN string being incorrect"""
self.report("ERROR: incorrect DN string component for %s in object %s - %s" % (attrname, dn, val))
dsdb_dn.dn = correct_dn
if not self.confirm_all('Change DN to %s?' % str(dsdb_dn), 'fix_all_target_mismatch'):
self.report("Not fixing %s" % errstr)
return
m = ldb.Message()
m.dn = dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
m['new_value'] = ldb.MessageElement(str(dsdb_dn), ldb.FLAG_MOD_ADD, attrname)
if self.do_modify(m, ["show_recycled:1"],
"Failed to fix incorrect DN string on attribute %s" % attrname):
self.report("Fixed incorrect DN string on attribute %s" % (attrname))
def err_unknown_attribute(self, obj, attrname):
'''handle an unknown attribute error'''
self.report("ERROR: unknown attribute '%s' in %s" % (attrname, obj.dn))
if not self.confirm_all('Remove unknown attribute %s' % attrname, 'remove_all_unknown_attributes'):
self.report("Not removing %s" % attrname)
return
m = ldb.Message()
m.dn = obj.dn
m['old_value'] = ldb.MessageElement([], ldb.FLAG_MOD_DELETE, attrname)
if self.do_modify(m, ["relax:0", "show_recycled:1"],
"Failed to remove unknown attribute %s" % attrname):
self.report("Removed unknown attribute %s" % (attrname))
def err_missing_backlink(self, obj, attrname, val, backlink_name, target_dn):
'''handle a missing backlink value'''
self.report("ERROR: missing backlink attribute '%s' in %s for link %s in %s" % (backlink_name, target_dn, attrname, obj.dn))
if not self.confirm_all('Fix missing backlink %s' % backlink_name, 'fix_all_missing_backlinks'):
self.report("Not fixing missing backlink %s" % backlink_name)
return
m = ldb.Message()
m.dn = obj.dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
m['new_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_ADD, attrname)
if self.do_modify(m, ["show_recycled:1"],
"Failed to fix missing backlink %s" % backlink_name):
self.report("Fixed missing backlink %s" % (backlink_name))
def err_incorrect_rmd_flags(self, obj, attrname, revealed_dn):
        '''handle an incorrect RMD_FLAGS value'''
rmd_flags = int(revealed_dn.dn.get_extended_component("RMD_FLAGS"))
self.report("ERROR: incorrect RMD_FLAGS value %u for attribute '%s' in %s for link %s" % (rmd_flags, attrname, obj.dn, revealed_dn.dn.extended_str()))
if not self.confirm_all('Fix incorrect RMD_FLAGS %u' % rmd_flags, 'fix_rmd_flags'):
self.report("Not fixing incorrect RMD_FLAGS %u" % rmd_flags)
return
m = ldb.Message()
m.dn = obj.dn
m['old_value'] = ldb.MessageElement(str(revealed_dn), ldb.FLAG_MOD_DELETE, attrname)
if self.do_modify(m, ["show_recycled:1", "reveal_internals:0", "show_deleted:0"],
"Failed to fix incorrect RMD_FLAGS %u" % rmd_flags):
self.report("Fixed incorrect RMD_FLAGS %u" % (rmd_flags))
def err_orphaned_backlink(self, obj, attrname, val, link_name, target_dn):
        '''handle an orphaned backlink value'''
self.report("ERROR: orphaned backlink attribute '%s' in %s for link %s in %s" % (attrname, obj.dn, link_name, target_dn))
if not self.confirm_all('Remove orphaned backlink %s' % link_name, 'fix_all_orphaned_backlinks'):
self.report("Not removing orphaned backlink %s" % link_name)
return
m = ldb.Message()
m.dn = obj.dn
m['value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
if self.do_modify(m, ["show_recycled:1", "relax:0"],
"Failed to fix orphaned backlink %s" % link_name):
self.report("Fixed orphaned backlink %s" % (link_name))
def err_no_fsmoRoleOwner(self, obj):
'''handle a missing fSMORoleOwner'''
self.report("ERROR: fSMORoleOwner not found for role %s" % (obj.dn))
res = self.samdb.search("",
scope=ldb.SCOPE_BASE, attrs=["dsServiceName"])
assert len(res) == 1
serviceName = res[0]["dsServiceName"][0]
        if not self.confirm_all('Seize role %s onto current DC by adding fSMORoleOwner=%s' % (obj.dn, serviceName), 'seize_fsmo_role'):
            self.report("Not seizing role %s onto current DC by adding fSMORoleOwner=%s" % (obj.dn, serviceName))
            return
        m = ldb.Message()
        m.dn = obj.dn
        m['value'] = ldb.MessageElement(serviceName, ldb.FLAG_MOD_ADD, 'fSMORoleOwner')
        if self.do_modify(m, [],
                          "Failed to seize role %s onto current DC by adding fSMORoleOwner=%s" % (obj.dn, serviceName)):
            self.report("Seized role %s onto current DC by adding fSMORoleOwner=%s" % (obj.dn, serviceName))
def err_missing_parent(self, obj):
'''handle a missing parent'''
self.report("ERROR: parent object not found for %s" % (obj.dn))
if not self.confirm_all('Move object %s into LostAndFound?' % (obj.dn), 'move_to_lost_and_found'):
self.report('Not moving object %s into LostAndFound' % (obj.dn))
return
        keep_transaction = False  # only commit once the fixes below succeed
self.samdb.transaction_start()
try:
            nc_root = self.samdb.get_nc_root(obj.dn)
lost_and_found = self.samdb.get_wellknown_dn(nc_root, dsdb.DS_GUID_LOSTANDFOUND_CONTAINER)
new_dn = ldb.Dn(self.samdb, str(obj.dn))
new_dn.remove_base_components(len(new_dn) - 1)
if self.do_rename(obj.dn, new_dn, lost_and_found, ["show_deleted:0", "relax:0"],
"Failed to rename object %s into lostAndFound at %s" % (obj.dn, new_dn + lost_and_found)):
self.report("Renamed object %s into lostAndFound at %s" % (obj.dn, new_dn + lost_and_found))
m = ldb.Message()
m.dn = obj.dn
m['lastKnownParent'] = ldb.MessageElement(str(obj.dn.parent()), ldb.FLAG_MOD_REPLACE, 'lastKnownParent')
if self.do_modify(m, [],
"Failed to set lastKnownParent on lostAndFound object at %s" % (new_dn + lost_and_found)):
self.report("Set lastKnownParent on lostAndFound object at %s" % (new_dn + lost_and_found))
keep_transaction = True
except:
self.samdb.transaction_cancel()
raise
if keep_transaction:
self.samdb.transaction_commit()
else:
self.samdb.transaction_cancel()
def err_wrong_instancetype(self, obj, calculated_instancetype):
'''handle a wrong instanceType'''
self.report("ERROR: wrong instanceType %s on %s, should be %d" % (obj["instanceType"], obj.dn, calculated_instancetype))
if not self.confirm_all('Change instanceType from %s to %d on %s?' % (obj["instanceType"], calculated_instancetype, obj.dn), 'fix_instancetype'):
self.report('Not changing instanceType from %s to %d on %s' % (obj["instanceType"], calculated_instancetype, obj.dn))
return
m = ldb.Message()
m.dn = obj.dn
m['value'] = ldb.MessageElement(str(calculated_instancetype), ldb.FLAG_MOD_REPLACE, 'instanceType')
if self.do_modify(m, ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK_MODIFY_RO_REPLICA],
"Failed to correct missing instanceType on %s by setting instanceType=%d" % (obj.dn, calculated_instancetype)):
self.report("Corrected instancetype on %s by setting instanceType=%d" % (obj.dn, calculated_instancetype))
def find_revealed_link(self, dn, attrname, guid):
'''return a revealed link in an object'''
res = self.samdb.search(base=dn, scope=ldb.SCOPE_BASE, attrs=[attrname],
controls=["show_deleted:0", "extended_dn:0", "reveal_internals:0"])
syntax_oid = self.samdb_schema.get_syntax_oid_from_lDAPDisplayName(attrname)
for val in res[0][attrname]:
dsdb_dn = dsdb_Dn(self.samdb, val, syntax_oid)
guid2 = dsdb_dn.dn.get_extended_component("GUID")
if guid == guid2:
return dsdb_dn
return None
def check_dn(self, obj, attrname, syntax_oid):
'''check a DN attribute for correctness'''
error_count = 0
for val in obj[attrname]:
dsdb_dn = dsdb_Dn(self.samdb, val, syntax_oid)
# all DNs should have a GUID component
guid = dsdb_dn.dn.get_extended_component("GUID")
if guid is None:
error_count += 1
self.err_incorrect_dn_GUID(obj.dn, attrname, val, dsdb_dn,
"missing GUID")
continue
guidstr = str(misc.GUID(guid))
attrs = ['isDeleted']
if (str(attrname).lower() == 'msds-hasinstantiatedncs') and (obj.dn == self.ntds_dsa):
fixing_msDS_HasInstantiatedNCs = True
attrs.append("instanceType")
else:
fixing_msDS_HasInstantiatedNCs = False
linkID = self.samdb_schema.get_linkId_from_lDAPDisplayName(attrname)
reverse_link_name = self.samdb_schema.get_backlink_from_lDAPDisplayName(attrname)
if reverse_link_name is not None:
attrs.append(reverse_link_name)
            # check it's the right GUID
try:
res = self.samdb.search(base="<GUID=%s>" % guidstr, scope=ldb.SCOPE_BASE,
attrs=attrs, controls=["extended_dn:1:1", "show_recycled:1"])
except ldb.LdbError, (enum, estr):
error_count += 1
self.err_incorrect_dn_GUID(obj.dn, attrname, val, dsdb_dn, "incorrect GUID")
continue
if fixing_msDS_HasInstantiatedNCs:
dsdb_dn.prefix = "B:8:%08X:" % int(res[0]['instanceType'][0])
dsdb_dn.binary = "%08X" % int(res[0]['instanceType'][0])
if str(dsdb_dn) != val:
                    error_count += 1
self.err_incorrect_binary_dn(obj.dn, attrname, val, dsdb_dn, "incorrect instanceType part of Binary DN")
continue
# now we have two cases - the source object might or might not be deleted
is_deleted = 'isDeleted' in obj and obj['isDeleted'][0].upper() == 'TRUE'
target_is_deleted = 'isDeleted' in res[0] and res[0]['isDeleted'][0].upper() == 'TRUE'
# the target DN is not allowed to be deleted, unless the target DN is the
# special Deleted Objects container
if target_is_deleted and not is_deleted and not self.is_deleted_objects_dn(dsdb_dn):
error_count += 1
self.err_deleted_dn(obj.dn, attrname, val, dsdb_dn, res[0].dn)
continue
# check the DN matches in string form
if res[0].dn.extended_str() != dsdb_dn.dn.extended_str():
error_count += 1
self.err_dn_target_mismatch(obj.dn, attrname, val, dsdb_dn,
res[0].dn, "incorrect string version of DN")
continue
if is_deleted and not target_is_deleted and reverse_link_name is not None:
revealed_dn = self.find_revealed_link(obj.dn, attrname, guid)
rmd_flags = revealed_dn.dn.get_extended_component("RMD_FLAGS")
if rmd_flags is not None and (int(rmd_flags) & 1) == 0:
# the RMD_FLAGS for this link should be 1, as the target is deleted
self.err_incorrect_rmd_flags(obj, attrname, revealed_dn)
continue
# check the reverse_link is correct if there should be one
if reverse_link_name is not None:
match_count = 0
if reverse_link_name in res[0]:
for v in res[0][reverse_link_name]:
if v == obj.dn.extended_str():
match_count += 1
if match_count != 1:
error_count += 1
if linkID & 1:
self.err_orphaned_backlink(obj, attrname, val, reverse_link_name, dsdb_dn.dn)
else:
self.err_missing_backlink(obj, attrname, val, reverse_link_name, dsdb_dn.dn)
continue
return error_count
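    # Extended-DN sketch: the values checked above look like
    #   <GUID=fbf9aa4e-...>;OU=Sales,DC=example,DC=com
    # check_dn() resolves the GUID part and verifies that the string part
    # still names the same object, repairing it after renames.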
def get_originating_time(self, val, attid):
'''Read metadata properties and return the originating time for
a given attributeId.
:return: the originating time or 0 if not found
'''
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, str(val))
for o in repl.ctr.array:
if o.attid == attid:
return o.originating_change_time
return 0
def process_metadata(self, val):
'''Read metadata properties and list attributes in it'''
list_att = []
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, str(val))
for o in repl.ctr.array:
att = self.samdb_schema.get_lDAPDisplayName_by_attid(o.attid)
list_att.append(att.lower())
return list_att
def fix_metadata(self, dn, attr):
'''re-write replPropertyMetaData elements for a single attribute for a
object. This is used to fix missing replPropertyMetaData elements'''
res = self.samdb.search(base = dn, scope=ldb.SCOPE_BASE, attrs = [attr],
controls = ["search_options:1:2", "show_recycled:1"])
msg = res[0]
nmsg = ldb.Message()
nmsg.dn = dn
nmsg[attr] = ldb.MessageElement(msg[attr], ldb.FLAG_MOD_REPLACE, attr)
if self.do_modify(nmsg, ["relax:0", "provision:0", "show_recycled:1"],
"Failed to fix metadata for attribute %s" % attr):
self.report("Fixed metadata for attribute %s" % attr)
def ace_get_effective_inherited_type(self, ace):
if ace.flags & security.SEC_ACE_FLAG_INHERIT_ONLY:
return None
check = False
if ace.type == security.SEC_ACE_TYPE_ACCESS_ALLOWED_OBJECT:
check = True
elif ace.type == security.SEC_ACE_TYPE_ACCESS_DENIED_OBJECT:
check = True
elif ace.type == security.SEC_ACE_TYPE_SYSTEM_AUDIT_OBJECT:
check = True
elif ace.type == security.SEC_ACE_TYPE_SYSTEM_ALARM_OBJECT:
check = True
if not check:
return None
if not ace.object.flags & security.SEC_ACE_INHERITED_OBJECT_TYPE_PRESENT:
return None
return str(ace.object.inherited_type)
def lookup_class_schemaIDGUID(self, cls):
if cls in self.class_schemaIDGUID:
return self.class_schemaIDGUID[cls]
flt = "(&(ldapDisplayName=%s)(objectClass=classSchema))" % cls
res = self.samdb.search(base=self.schema_dn,
expression=flt,
attrs=["schemaIDGUID"])
t = str(ndr_unpack(misc.GUID, res[0]["schemaIDGUID"][0]))
self.class_schemaIDGUID[cls] = t
return t
def process_sd(self, dn, obj):
sd_attr = "nTSecurityDescriptor"
sd_val = obj[sd_attr]
sd = ndr_unpack(security.descriptor, str(sd_val))
is_deleted = 'isDeleted' in obj and obj['isDeleted'][0].upper() == 'TRUE'
if is_deleted:
# we don't fix deleted objects
return (sd, None)
sd_clean = security.descriptor()
sd_clean.owner_sid = sd.owner_sid
sd_clean.group_sid = sd.group_sid
sd_clean.type = sd.type
sd_clean.revision = sd.revision
broken = False
last_inherited_type = None
aces = []
if sd.sacl is not None:
aces = sd.sacl.aces
for i in range(0, len(aces)):
ace = aces[i]
if not ace.flags & security.SEC_ACE_FLAG_INHERITED_ACE:
sd_clean.sacl_add(ace)
continue
t = self.ace_get_effective_inherited_type(ace)
if t is None:
continue
if last_inherited_type is not None:
if t != last_inherited_type:
# if it inherited from more than
# one type it's very likely to be broken
#
# If not the recalculation will calculate
# the same result.
broken = True
continue
last_inherited_type = t
aces = []
if sd.dacl is not None:
aces = sd.dacl.aces
for i in range(0, len(aces)):
ace = aces[i]
if not ace.flags & security.SEC_ACE_FLAG_INHERITED_ACE:
sd_clean.dacl_add(ace)
continue
t = self.ace_get_effective_inherited_type(ace)
if t is None:
continue
if last_inherited_type is not None:
if t != last_inherited_type:
# if it inherited from more than
# one type it's very likely to be broken
#
# If not the recalculation will calculate
# the same result.
broken = True
continue
last_inherited_type = t
if broken:
return (sd_clean, sd)
if last_inherited_type is None:
# ok
return (sd, None)
cls = None
try:
cls = obj["objectClass"][-1]
except KeyError, e:
pass
if cls is None:
res = self.samdb.search(base=dn, scope=ldb.SCOPE_BASE,
attrs=["isDeleted", "objectClass"],
controls=["show_recycled:1"])
o = res[0]
is_deleted = 'isDeleted' in o and o['isDeleted'][0].upper() == 'TRUE'
if is_deleted:
# we don't fix deleted objects
return (sd, None)
cls = o["objectClass"][-1]
t = self.lookup_class_schemaIDGUID(cls)
if t != last_inherited_type:
# broken
return (sd_clean, sd)
# ok
return (sd, None)
def err_wrong_sd(self, dn, sd, sd_broken):
'''re-write the SD due to incorrect inherited ACEs'''
sd_attr = "nTSecurityDescriptor"
sd_val = ndr_pack(sd)
sd_flags = security.SECINFO_DACL | security.SECINFO_SACL
if not self.confirm_all('Fix %s on %s?' % (sd_attr, dn), 'fix_ntsecuritydescriptor'):
self.report('Not fixing %s on %s\n' % (sd_attr, dn))
return
nmsg = ldb.Message()
nmsg.dn = dn
nmsg[sd_attr] = ldb.MessageElement(sd_val, ldb.FLAG_MOD_REPLACE, sd_attr)
if self.do_modify(nmsg, ["sd_flags:1:%d" % sd_flags],
"Failed to fix attribute %s" % sd_attr):
self.report("Fixed attribute '%s' of '%s'\n" % (sd_attr, dn))
def err_wrong_default_sd(self, dn, sd, sd_old, diff):
'''re-write the SD due to not matching the default (optional mode for fixing an incorrect provision)'''
sd_attr = "nTSecurityDescriptor"
sd_val = ndr_pack(sd)
sd_old_val = ndr_pack(sd_old)
sd_flags = security.SECINFO_DACL | security.SECINFO_SACL
if sd.owner_sid is not None:
sd_flags |= security.SECINFO_OWNER
if sd.group_sid is not None:
sd_flags |= security.SECINFO_GROUP
if not self.confirm_all('Reset %s on %s back to provision default?\n%s' % (sd_attr, dn, diff), 'reset_all_well_known_acls'):
self.report('Not resetting %s on %s\n' % (sd_attr, dn))
return
m = ldb.Message()
m.dn = dn
m[sd_attr] = ldb.MessageElement(sd_val, ldb.FLAG_MOD_REPLACE, sd_attr)
if self.do_modify(m, ["sd_flags:1:%d" % sd_flags],
"Failed to reset attribute %s" % sd_attr):
self.report("Fixed attribute '%s' of '%s'\n" % (sd_attr, dn))
def err_missing_sd_owner(self, dn, sd):
'''re-write the SD due to a missing owner or group'''
sd_attr = "nTSecurityDescriptor"
sd_val = ndr_pack(sd)
sd_flags = security.SECINFO_OWNER | security.SECINFO_GROUP
if not self.confirm_all('Fix missing owner or group in %s on %s?' % (sd_attr, dn), 'fix_ntsecuritydescriptor_owner_group'):
self.report('Not fixing missing owner or group %s on %s\n' % (sd_attr, dn))
return
nmsg = ldb.Message()
nmsg.dn = dn
nmsg[sd_attr] = ldb.MessageElement(sd_val, ldb.FLAG_MOD_REPLACE, sd_attr)
# By setting the session_info to admin_session_info and
# setting the security.SECINFO_OWNER | security.SECINFO_GROUP
# flags we cause the descriptor module to set the correct
# owner and group on the SD, replacing the None/NULL values
# for owner_sid and group_sid currently present.
#
# The admin_session_info matches that used in provision, and
# is the best guess we can make for an existing object that
# hasn't had something specifically set.
#
# This is important for the dns related naming contexts.
self.samdb.set_session_info(self.admin_session_info)
if self.do_modify(nmsg, ["sd_flags:1:%d" % sd_flags],
"Failed to fix metadata for attribute %s" % sd_attr):
self.report("Fixed attribute '%s' of '%s'\n" % (sd_attr, dn))
self.samdb.set_session_info(self.system_session_info)
def has_replmetadata_zero_invocationid(self, dn, repl_meta_data):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
str(repl_meta_data))
ctr = repl.ctr
found = False
for o in ctr.array:
# Search for a zero invocationID
if o.originating_invocation_id != misc.GUID("00000000-0000-0000-0000-000000000000"):
continue
found = True
            self.report('''ERROR: on replPropertyMetaData of %s, the originating invocationID on attribute 0x%08x,
version %d changed at %s is 00000000-0000-0000-0000-000000000000,
but should be non-zero. Proposed fix is to set it to our invocationID (%s).'''
% (dn, o.attid, o.version,
time.ctime(samba.nttime2unix(o.originating_change_time)),
self.samdb.get_invocation_id()))
return found
def err_replmetadata_zero_invocationid(self, dn, attr, repl_meta_data):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
str(repl_meta_data))
ctr = repl.ctr
now = samba.unix2nttime(int(time.time()))
found = False
for o in ctr.array:
# Search for a zero invocationID
if o.originating_invocation_id != misc.GUID("00000000-0000-0000-0000-000000000000"):
continue
found = True
seq = self.samdb.sequence_number(ldb.SEQ_NEXT)
o.version = o.version + 1
o.originating_change_time = now
o.originating_invocation_id = misc.GUID(self.samdb.get_invocation_id())
o.originating_usn = seq
o.local_usn = seq
if found:
replBlob = ndr_pack(repl)
msg = ldb.Message()
msg.dn = dn
if not self.confirm_all('Fix %s on %s by setting originating_invocation_id on some elements to our invocationID %s?'
% (attr, dn, self.samdb.get_invocation_id()), 'fix_replmetadata_zero_invocationid'):
self.report('Not fixing %s on %s\n' % (attr, dn))
return
nmsg = ldb.Message()
nmsg.dn = dn
nmsg[attr] = ldb.MessageElement(replBlob, ldb.FLAG_MOD_REPLACE, attr)
if self.do_modify(nmsg, ["local_oid:1.3.6.1.4.1.7165.4.3.14:0"],
"Failed to fix attribute %s" % attr):
self.report("Fixed attribute '%s' of '%s'\n" % (attr, dn))
def is_deleted_deleted_objects(self, obj):
faulty = False
if "description" not in obj:
self.report("ERROR: description not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "showInAdvancedViewOnly" not in obj:
self.report("ERROR: showInAdvancedViewOnly not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "objectCategory" not in obj:
self.report("ERROR: objectCategory not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "isCriticalSystemObject" not in obj:
self.report("ERROR: isCriticalSystemObject not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "isRecycled" in obj:
self.report("ERROR: isRecycled present on Deleted Objects container %s" % obj.dn)
faulty = True
return faulty
def err_deleted_deleted_objects(self, obj):
nmsg = ldb.Message()
nmsg.dn = dn = obj.dn
if "description" not in obj:
nmsg["description"] = ldb.MessageElement("Container for deleted objects", ldb.FLAG_MOD_REPLACE, "description")
if "showInAdvancedViewOnly" not in obj:
nmsg["showInAdvancedViewOnly"] = ldb.MessageElement("TRUE", ldb.FLAG_MOD_REPLACE, "showInAdvancedViewOnly")
if "objectCategory" not in obj:
nmsg["objectCategory"] = ldb.MessageElement("CN=Container,%s" % self.schema_dn, ldb.FLAG_MOD_REPLACE, "objectCategory")
if "isCriticalSystemObject" not in obj:
nmsg["isCriticalSystemObject"] = ldb.MessageElement("TRUE", ldb.FLAG_MOD_REPLACE, "isCriticalSystemObject")
if "isRecycled" in obj:
nmsg["isRecycled"] = ldb.MessageElement("TRUE", ldb.FLAG_MOD_DELETE, "isRecycled")
if not self.confirm_all('Fix Deleted Objects container %s by restoring default attributes?'
% (dn), 'fix_deleted_deleted_objects'):
self.report('Not fixing missing/incorrect attributes on %s\n' % (dn))
return
if self.do_modify(nmsg, ["relax:0"],
"Failed to fix Deleted Objects container %s" % dn):
self.report("Fixed Deleted Objects container '%s'\n" % (dn))
def is_fsmo_role(self, dn):
        # samdb.domain_dn is a method; call it and compare against a real Dn
        if dn == ldb.Dn(self.samdb, self.samdb.domain_dn()):
return True
if dn == self.infrastructure_dn:
return True
if dn == self.naming_dn:
return True
if dn == self.schema_dn:
return True
if dn == self.rid_dn:
return True
return False
def calculate_instancetype(self, dn):
instancetype = 0
nc_root = self.samdb.get_nc_root(dn)
if dn == nc_root:
instancetype |= dsdb.INSTANCE_TYPE_IS_NC_HEAD
try:
self.samdb.search(base=dn.parent(), scope=ldb.SCOPE_BASE, attrs=[], controls=["show_recycled:1"])
except ldb.LdbError, (enum, estr):
if enum != ldb.ERR_NO_SUCH_OBJECT:
raise
else:
instancetype |= dsdb.INSTANCE_TYPE_NC_ABOVE
if self.write_ncs is not None and str(nc_root) in self.write_ncs:
instancetype |= dsdb.INSTANCE_TYPE_WRITE
return instancetype
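    # Example (a sketch): a writable NC head whose parent object is not held
    # locally yields INSTANCE_TYPE_IS_NC_HEAD | INSTANCE_TYPE_WRITE, while an
    # interior object of a writable NC yields just INSTANCE_TYPE_WRITE.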
def get_wellknown_sd(self, dn):
for [sd_dn, descriptor_fn] in self.wellknown_sds:
if dn == sd_dn:
domain_sid = security.dom_sid(self.samdb.get_domain_sid())
return ndr_unpack(security.descriptor,
descriptor_fn(domain_sid,
name_map=self.name_map))
raise KeyError
def check_object(self, dn, attrs=['*']):
'''check one object'''
if self.verbose:
self.report("Checking object %s" % dn)
if '*' in attrs:
attrs.append("replPropertyMetaData")
try:
sd_flags = 0
sd_flags |= security.SECINFO_OWNER
sd_flags |= security.SECINFO_GROUP
sd_flags |= security.SECINFO_DACL
sd_flags |= security.SECINFO_SACL
res = self.samdb.search(base=dn, scope=ldb.SCOPE_BASE,
controls=[
"extended_dn:1:1",
"show_recycled:1",
"show_deleted:1",
"sd_flags:1:%d" % sd_flags,
],
attrs=attrs)
except ldb.LdbError, (enum, estr):
if enum == ldb.ERR_NO_SUCH_OBJECT:
if self.in_transaction:
self.report("ERROR: Object %s disappeared during check" % dn)
return 1
return 0
raise
if len(res) != 1:
self.report("ERROR: Object %s failed to load during check" % dn)
return 1
obj = res[0]
error_count = 0
list_attrs_from_md = []
list_attrs_seen = []
got_repl_property_meta_data = False
for attrname in obj:
if attrname == 'dn':
continue
if str(attrname).lower() == 'replpropertymetadata':
if self.has_replmetadata_zero_invocationid(dn, obj[attrname]):
error_count += 1
self.err_replmetadata_zero_invocationid(dn, attrname, obj[attrname])
# We don't continue, as we may also have other fixes for this attribute
# based on what other attributes we see.
list_attrs_from_md = self.process_metadata(obj[attrname])
got_repl_property_meta_data = True
continue
if str(attrname).lower() == 'ntsecuritydescriptor':
(sd, sd_broken) = self.process_sd(dn, obj)
if sd_broken is not None:
self.err_wrong_sd(dn, sd, sd_broken)
error_count += 1
continue
if sd.owner_sid is None or sd.group_sid is None:
self.err_missing_sd_owner(dn, sd)
error_count += 1
continue
if self.reset_well_known_acls:
try:
well_known_sd = self.get_wellknown_sd(dn)
except KeyError:
continue
current_sd = ndr_unpack(security.descriptor,
str(obj[attrname][0]))
diff = get_diff_sds(well_known_sd, current_sd, security.dom_sid(self.samdb.get_domain_sid()))
if diff != "":
self.err_wrong_default_sd(dn, well_known_sd, current_sd, diff)
error_count += 1
continue
continue
if str(attrname).lower() == 'objectclass':
normalised = self.samdb.dsdb_normalise_attributes(self.samdb_schema, attrname, list(obj[attrname]))
if list(normalised) != list(obj[attrname]):
self.err_normalise_mismatch_replace(dn, attrname, list(obj[attrname]))
error_count += 1
continue
# check for empty attributes
for val in obj[attrname]:
if val == '':
self.err_empty_attribute(dn, attrname)
error_count += 1
continue
            # get the syntax oid for the attribute, so we can have
            # special handling for some specific attribute types
try:
syntax_oid = self.samdb_schema.get_syntax_oid_from_lDAPDisplayName(attrname)
except Exception, msg:
self.err_unknown_attribute(obj, attrname)
error_count += 1
continue
flag = self.samdb_schema.get_systemFlags_from_lDAPDisplayName(attrname)
if (not flag & dsdb.DS_FLAG_ATTR_NOT_REPLICATED
and not flag & dsdb.DS_FLAG_ATTR_IS_CONSTRUCTED
and not self.samdb_schema.get_linkId_from_lDAPDisplayName(attrname)):
list_attrs_seen.append(str(attrname).lower())
if syntax_oid in [ dsdb.DSDB_SYNTAX_BINARY_DN, dsdb.DSDB_SYNTAX_OR_NAME,
dsdb.DSDB_SYNTAX_STRING_DN, ldb.SYNTAX_DN ]:
# it's some form of DN, do specialised checking on those
error_count += self.check_dn(obj, attrname, syntax_oid)
# check for incorrectly normalised attributes
for val in obj[attrname]:
normalised = self.samdb.dsdb_normalise_attributes(self.samdb_schema, attrname, [val])
if len(normalised) != 1 or normalised[0] != val:
self.err_normalise_mismatch(dn, attrname, obj[attrname])
error_count += 1
break
if str(attrname).lower() == "instancetype":
calculated_instancetype = self.calculate_instancetype(dn)
if len(obj["instanceType"]) != 1 or obj["instanceType"][0] != str(calculated_instancetype):
error_count += 1
self.err_wrong_instancetype(obj, calculated_instancetype)
show_dn = True
if got_repl_property_meta_data:
rdn = (str(dn).split(","))[0]
if rdn == "CN=Deleted Objects":
isDeletedAttId = 131120
# It's 29/12/9999 at 23:59:59 UTC as specified in MS-ADTS 7.1.1.4.2 Deleted Objects Container
expectedTimeDo = 2650466015990000000
originating = self.get_originating_time(obj["replPropertyMetaData"], isDeletedAttId)
if originating != expectedTimeDo:
if self.confirm_all("Fix isDeleted originating_change_time on '%s'" % str(dn), 'fix_time_metadata'):
nmsg = ldb.Message()
nmsg.dn = dn
nmsg["isDeleted"] = ldb.MessageElement("TRUE", ldb.FLAG_MOD_REPLACE, "isDeleted")
error_count += 1
self.samdb.modify(nmsg, controls=["provision:0"])
else:
self.report("Not fixing isDeleted originating_change_time on '%s'" % str(dn))
for att in list_attrs_seen:
            if att not in list_attrs_from_md:
if show_dn:
self.report("On object %s" % dn)
show_dn = False
error_count += 1
self.report("ERROR: Attribute %s not present in replication metadata" % att)
if not self.confirm_all("Fix missing replPropertyMetaData element '%s'" % att, 'fix_all_metadata'):
self.report("Not fixing missing replPropertyMetaData element '%s'" % att)
continue
self.fix_metadata(dn, att)
if self.is_fsmo_role(dn):
if "fSMORoleOwner" not in obj:
self.err_no_fsmoRoleOwner(obj)
error_count += 1
try:
if dn != self.samdb.get_root_basedn():
res = self.samdb.search(base=dn.parent(), scope=ldb.SCOPE_BASE,
controls=["show_recycled:1", "show_deleted:1"])
except ldb.LdbError, (enum, estr):
if enum == ldb.ERR_NO_SUCH_OBJECT:
self.err_missing_parent(obj)
error_count += 1
else:
raise
if dn in self.deleted_objects_containers and '*' in attrs:
if self.is_deleted_deleted_objects(obj):
self.err_deleted_deleted_objects(obj)
error_count += 1
return error_count
################################################################
# check special @ROOTDSE attributes
def check_rootdse(self):
'''check the @ROOTDSE special object'''
dn = ldb.Dn(self.samdb, '@ROOTDSE')
if self.verbose:
self.report("Checking object %s" % dn)
res = self.samdb.search(base=dn, scope=ldb.SCOPE_BASE)
if len(res) != 1:
self.report("Object %s disappeared during check" % dn)
return 1
obj = res[0]
error_count = 0
# check that the dsServiceName is in GUID form
        if 'dsServiceName' not in obj:
self.report('ERROR: dsServiceName missing in @ROOTDSE')
return error_count+1
if not obj['dsServiceName'][0].startswith('<GUID='):
self.report('ERROR: dsServiceName not in GUID form in @ROOTDSE')
error_count += 1
if not self.confirm('Change dsServiceName to GUID form?'):
return error_count
res = self.samdb.search(base=ldb.Dn(self.samdb, obj['dsServiceName'][0]),
scope=ldb.SCOPE_BASE, attrs=['objectGUID'])
guid_str = str(ndr_unpack(misc.GUID, res[0]['objectGUID'][0]))
m = ldb.Message()
m.dn = dn
m['dsServiceName'] = ldb.MessageElement("<GUID=%s>" % guid_str,
ldb.FLAG_MOD_REPLACE, 'dsServiceName')
if self.do_modify(m, [], "Failed to change dsServiceName to GUID form", validate=False):
self.report("Changed dsServiceName to GUID form")
return error_count
###############################################
# re-index the database
def reindex_database(self):
'''re-index the whole database'''
m = ldb.Message()
m.dn = ldb.Dn(self.samdb, "@ATTRIBUTES")
m['add'] = ldb.MessageElement('NONE', ldb.FLAG_MOD_ADD, 'force_reindex')
m['delete'] = ldb.MessageElement('NONE', ldb.FLAG_MOD_DELETE, 'force_reindex')
return self.do_modify(m, [], 're-indexed database', validate=False)
###############################################
# reset @MODULES
def reset_modules(self):
'''reset @MODULES to that needed for current sam.ldb (to read a very old database)'''
m = ldb.Message()
m.dn = ldb.Dn(self.samdb, "@MODULES")
m['@LIST'] = ldb.MessageElement('samba_dsdb', ldb.FLAG_MOD_REPLACE, '@LIST')
return self.do_modify(m, [], 'reset @MODULES on database', validate=False)
|
CLVsol/odoo_cl_addons | refs/heads/master | cl_place/category/__init__.py | 1 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import cl_place_category
|
pandeydivesh15/item_sharing_portal | refs/heads/master | src/post/views.py | 1 | from django.shortcuts import render, get_object_or_404, redirect
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseRedirect
from django.contrib import messages
# Create your views here.
from .models import Post
from User.models import User, check_if_auth_user
import home.views
def posts_list(request, querySet_list, context_data=None): # Listing Posts
    paginator = Paginator(querySet_list, 6) # Show 6 posts per page
page = request.GET.get('page') #'page' denotes the page number
try:
querySet = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
querySet = paginator.page(1)
except EmptyPage:
#If page is out of range (e.g. 9999), deliver last page of results.
querySet = paginator.page(paginator.num_pages)
    new_context_data = {
        "all_posts": querySet
    }
    if context_data:  # the default None would make dict.update() raise TypeError
        new_context_data.update(context_data)
return render(request, "postsList.html", new_context_data)
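# Editor's note: a hypothetical call site (names assumed, not taken from this
# repo) -- posts_list(request, Post.objects.order_by('-id'),
# context_data={'category': home.views.CATEGORIES}) would render
# postsList.html with a page-sized "all_posts" plus the extra context.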
#CRUD implemented here
def posts_create(request):
check = check_if_auth_user(request)
if not check:
messages.error(request, "Perform login first to create new post")
return redirect("home:welcome")
current_user = User.objects.filter(user_id = check)[0]
if request.method == "POST":
title = request.POST.get('item_title')
disc = request.POST.get('item_disc')
category = request.POST.get('item_category')
image = request.FILES['item_image']
price = request.POST.get('item_price')
reason_post = request.POST.get('reason_item')
new_post = Post(
title = title,
author = current_user,
reason_post = reason_post,
description = disc,
category = category,
image = image,
price = price)
new_post.save()
messages.success(request, "New Post Created")
return redirect(new_post.getAbsoluteURL())
contextData={
"user" : current_user,
"category" : home.views.CATEGORIES,
}
return render(request,"createPost.html",contextData)
def posts_detail(request,id=None):
instance=get_object_or_404(Post,id=id)
check = check_if_auth_user(request)
current_user = None
if check:
current_user = User.objects.filter(user_id = check)[0]
contextData={
"user" : current_user,
"post_obj": instance,
}
return render(request, "showPost.html", contextData)
def posts_update(request,id=None):
check = check_if_auth_user(request)
if not check:
messages.error(request, "Perform login first to edit any post")
return redirect("home:welcome")
current_user = User.objects.filter(user_id = check)[0]
instance=get_object_or_404(Post,id=id)
if instance.author.user_id != current_user.user_id:
messages.error(request, "You can't edit this post." + str(instance.author.user_id))
return redirect("home:welcome")
contextData={
"user" : current_user,
"post_obj": instance,
"category" : home.views.CATEGORIES,
}
if request.method == "POST":
title = request.POST.get('item_title')
disc = request.POST.get('item_disc')
category = request.POST.get('item_category')
        image = request.FILES.get('item_image')  # may be absent when the image is unchanged
price = request.POST.get('item_price')
reason_post = request.POST.get('reason_item')
if reason_post == "lostfound":
price = 0
instance.title = title
instance.description = disc
instance.category = category
        if image:  # keep the current image when no new file is uploaded
            instance.image = image
instance.price = price
instance.reason_post = reason_post
instance.save()
messages.success(request, "Post updated")
return redirect(instance.getAbsoluteURL())
return render(request, "editPost.html", contextData)
def posts_delete(request, id=None):
check = check_if_auth_user(request)
if not check:
messages.error(request, "Perform login first to delete any post")
return redirect("home:welcome")
current_user = User.objects.filter(user_id = check)[0]
instance=get_object_or_404(Post,id=id)
if instance.author != current_user:
messages.error(request, "You can't delete this post.")
else:
instance.delete()
messages.success(request,"Post successfully deleted")
return redirect("home:welcome")
|
tximikel/kuma | refs/heads/master | vendor/packages/translate/storage/properties.py | 24 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2004-2014 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Classes that hold units of .properties, and similar, files that are used in
translating Java, Mozilla, MacOS and other software.
The :class:`propfile` class is a monolingual class with :class:`propunit`
providing unit level access.
The .properties store has become a general key value pair class with
:class:`Dialect` providing the ability to change the behaviour of the
parsing and handling of the various dialects.
Currently we support:
- Java .properties
- Mozilla .properties
- Adobe Flex files
- MacOS X .strings files
- Skype .lang files
The following provides references and descriptions of the various
dialects supported:
Java
Java .properties are supported completely except for the ability to drop
pairs that are not translated.
The following `.properties file description
<http://docs.oracle.com/javase/1.4.2/docs/api/java/util/Properties.html#load(java.io.InputStream)>`_
gives a good references to the .properties specification.
Properties file may also hold Java `MessageFormat
<http://docs.oracle.com/javase/1.4.2/docs/api/java/text/MessageFormat.html>`_
messages. No special handling is provided in this storage class for
MessageFormat, but this may be implemented in future.
    All delimiter types, comments, line continuations and whitespace handling
    around delimiters are supported.
Mozilla
Mozilla files use '=' as a delimiter, are UTF-8 encoded and thus don't
need \\u escaping. Any \\U values will be converted to correct Unicode
characters.
Strings
Mac OS X strings files are implemented using
`these <https://developer.apple.com/library/mac/#documentation/MacOSX/Conceptual/BPInternational/Articles/StringsFiles.html>`_
`two <https://developer.apple.com/library/mac/#documentation/Cocoa/Conceptual/LoadingResources/Strings/Strings.html>`_
articles as references.
Flex
Adobe Flex files seem to be normal .properties files but in UTF-8 just like
Mozilla files. This
`page <http://livedocs.adobe.com/flex/3/html/help.html?content=l10n_3.html>`_
provides the information used to implement the dialect.
Skype
Skype .lang files seem to be UTF-16 encoded .properties files.
A simple summary of what is permissible follows.
Comments supported:
.. code-block:: properties
# a comment
! a comment
// a comment (only at the beginning of a line)
/* a comment (not across multiple lines) */
Name and Value pairs:
.. code-block:: properties
# Delimiters
key = value
key : value
key value
# Space in key and around value
\ key\ = \ value
# Note that the b and c are escaped for reST rendering
b = a string with escape sequences \\t \\n \\r \\\\ \\" \\' \\ (space) \u0123
c = a string with a continuation line \\
continuation line
# Special cases
# key with no value
key
# value no key (extractable in prop2po but not mergeable in po2prop)
=value
# .strings specific
"key" = "value";
"""
import re
from translate.lang import data
from translate.misc import quote
from translate.misc.deprecation import deprecated
from translate.storage import base
labelsuffixes = (".label", ".title")
"""Label suffixes: entries with this suffix are able to be comibed with accesskeys
found in in entries ending with :attr:`.accesskeysuffixes`"""
accesskeysuffixes = (".accesskey", ".accessKey", ".akey")
"""Accesskey Suffixes: entries with this suffix may be combined with labels
ending in :attr:`.labelsuffixes` into accelerator notation"""
# the rstripeols convert dos <-> unix nicely as well
# output will be appropriate for the platform
eol = "\n"
def _find_delimiter(line, delimiters):
"""Find the type and position of the delimiter in a property line.
Property files can be delimited by "=", ":" or whitespace (space for now).
We find the position of each delimiter, then find the one that appears
first.
:param line: A properties line
:type line: str
:param delimiters: valid delimiters
:type delimiters: list
:return: delimiter character and offset within *line*
:rtype: Tuple (delimiter char, Offset Integer)
"""
delimiter_dict = {}
for delimiter in delimiters:
delimiter_dict[delimiter] = -1
delimiters = delimiter_dict
# Find the position of each delimiter type
for delimiter, pos in delimiters.iteritems():
prewhitespace = len(line) - len(line.lstrip())
pos = line.find(delimiter, prewhitespace)
while pos != -1:
if delimiters[delimiter] == -1 and line[pos-1] != u"\\":
delimiters[delimiter] = pos
break
pos = line.find(delimiter, pos + 1)
# Find the first delimiter
mindelimiter = None
minpos = -1
for delimiter, pos in delimiters.iteritems():
if pos == -1 or delimiter == u" ":
continue
if minpos == -1 or pos < minpos:
minpos = pos
mindelimiter = delimiter
if mindelimiter is None and delimiters.get(u" ", -1) != -1:
# Use space delimiter if we found nothing else
return (u" ", delimiters[" "])
if (mindelimiter is not None and
u" " in delimiters and
delimiters[u" "] < delimiters[mindelimiter]):
# If space delimiter occurs earlier than ":" or "=" then it is the
# delimiter only if there are non-whitespace characters between it and
# the other detected delimiter.
if len(line[delimiters[u" "]:delimiters[mindelimiter]].strip()) > 0:
return (u" ", delimiters[u" "])
return (mindelimiter, minpos)
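# Editor's example (illustrative): with the Java delimiter set
# [u"=", u":", u" "], _find_delimiter(u"key = value", ...) returns (u"=", 4),
# while the space-delimited u"key value" yields (u" ", 3).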
@deprecated("Use Dialect.find_delimiter instead")
def find_delimeter(line):
"""Misspelled function that is kept around in case someone relies on it.
.. deprecated:: 1.7.0
Use :func:`find_delimiter` instead
"""
return _find_delimiter(line, DialectJava.delimiters)
def is_line_continuation(line):
"""Determine whether *line* has a line continuation marker.
.properties files can be terminated with a backslash (\\) indicating
that the 'value' continues on the next line. Continuation is only
    valid if there is an odd number of backslashes (an even number
    would result in a set of N/2 literal backslashes, not an escape)
:param line: A properties line
:type line: str
:return: Does *line* end with a line continuation
:rtype: Boolean
"""
pos = -1
count = 0
if len(line) == 0:
return False
# Count the slashes from the end of the line. Ensure we don't
    # go into an infinite loop.
while len(line) >= -pos and line[pos:][0] == "\\":
pos -= 1
count += 1
return (count % 2) == 1 # Odd is a line continuation, even is not
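# Editor's example: a value ending in a single backslash continues, while an
# escaped backslash (an even count) does not:
#   is_line_continuation(u"value \\")   -> True  (one trailing backslash)
#   is_line_continuation(u"value \\\\") -> False (escaped backslash)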
def is_comment_one_line(line):
"""Determine whether a *line* is a one-line comment.
:param line: A properties line
:type line: unicode
:return: True if line is a one-line comment
:rtype: bool
"""
stripped = line.strip()
line_starters = (u'#', u'!', u'//', )
for starter in line_starters:
if stripped.startswith(starter):
return True
if stripped.startswith(u'/*') and stripped.endswith(u'*/'):
return True
return False
def is_comment_start(line):
"""Determine whether a *line* starts a new multi-line comment.
:param line: A properties line
:type line: unicode
:return: True if line starts a new multi-line comment
:rtype: bool
"""
stripped = line.strip()
return stripped.startswith('/*') and not stripped.endswith('*/')
def is_comment_end(line):
"""Determine whether a *line* ends a new multi-line comment.
:param line: A properties line
:type line: unicode
:return: True if line ends a new multi-line comment
:rtype: bool
"""
stripped = line.strip()
return not stripped.startswith('/*') and stripped.endswith('*/')
def _key_strip(key):
"""Cleanup whitespace found around a key
:param key: A properties key
:type key: str
:return: Key without any unneeded whitespace
:rtype: str
"""
newkey = key.rstrip()
# If string now ends in \ we put back the whitespace that was escaped
if newkey[-1:] == "\\":
newkey += key[len(newkey):len(newkey)+1]
return newkey.lstrip()
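# Editor's example: _key_strip(u"  key\\ ") drops the unescaped leading
# whitespace but re-attaches the escaped trailing space, giving u"key\\ ".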
dialects = {}
default_dialect = "java"
def register_dialect(dialect):
"""Decorator that registers the dialect."""
dialects[dialect.name] = dialect
return dialect
def get_dialect(dialect=default_dialect):
return dialects.get(dialect)
class Dialect(object):
"""Settings for the various behaviours in key=value files."""
name = None
default_encoding = 'iso-8859-1'
delimiters = None
pair_terminator = u""
key_wrap_char = u""
value_wrap_char = u""
drop_comments = []
@classmethod
def encode(cls, string, encoding=None):
"""Encode the string"""
# FIXME: dialects are a bad idea, not possible for subclasses
# to override key methods
if encoding != "utf-8":
return quote.javapropertiesencode(string or u"")
return string or u""
@classmethod
def find_delimiter(cls, line):
"""Find the delimiter"""
return _find_delimiter(line, cls.delimiters)
@classmethod
def key_strip(cls, key):
"""Strip unneeded characters from the key"""
return _key_strip(key)
@classmethod
def value_strip(cls, value):
"""Strip unneeded characters from the value"""
return value.lstrip()
@register_dialect
class DialectJava(Dialect):
name = "java"
default_encoding = "iso-8859-1"
delimiters = [u"=", u":", u" "]
@register_dialect
class DialectJavaUtf8(DialectJava):
name = "java-utf8"
default_encoding = "utf-8"
delimiters = [u"=", u":", u" "]
@classmethod
def encode(cls, string, encoding=None):
return quote.mozillapropertiesencode(string or u"")
@register_dialect
class DialectFlex(DialectJava):
name = "flex"
default_encoding = "utf-8"
@register_dialect
class DialectMozilla(DialectJavaUtf8):
name = "mozilla"
delimiters = [u"="]
@classmethod
def encode(cls, string, encoding=None):
"""Encode the string"""
string = quote.mozillapropertiesencode(string or u"")
string = quote.mozillaescapemarginspaces(string or u"")
return string
@register_dialect
class DialectGaia(DialectMozilla):
name = "gaia"
delimiters = [u"="]
@register_dialect
class DialectSkype(Dialect):
name = "skype"
default_encoding = "utf-16"
delimiters = [u"="]
@classmethod
def encode(cls, string, encoding=None):
return quote.mozillapropertiesencode(string or u"")
@register_dialect
class DialectStrings(Dialect):
name = "strings"
default_encoding = "utf-16"
delimiters = [u"="]
pair_terminator = u";"
key_wrap_char = u'"'
value_wrap_char = u'"'
out_ending = u';'
out_delimiter_wrappers = u' '
drop_comments = ["/* No comment provided by engineer. */"]
@classmethod
def key_strip(cls, key):
"""Strip unneeded characters from the key"""
newkey = key.rstrip().rstrip('"')
# If string now ends in \ we put back the char that was escaped
if newkey[-1:] == "\\":
newkey += key[len(newkey):len(newkey)+1]
ret = newkey.lstrip().lstrip('"')
return ret.replace('\\"', '"')
@classmethod
def value_strip(cls, value):
"""Strip unneeded characters from the value"""
newvalue = value.rstrip().rstrip(';').rstrip('"')
# If string now ends in \ we put back the char that was escaped
if newvalue[-1:] == "\\":
newvalue += value[len(newvalue):len(newvalue)+1]
ret = newvalue.lstrip().lstrip('"')
return ret.replace('\\"', '"')
@classmethod
def encode(cls, string, encoding=None):
return string.replace("\n", r"\n").replace("\t", r"\t")
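# Editor's note (illustrative): with the strings dialects, propunit.getoutput
# wraps both key and value in '"', pads the '=' with spaces and appends the
# ';' terminator, so a unit renders in .strings form, e.g.
#   "key" = "value";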
@register_dialect
class DialectStringsUtf8(DialectStrings):
name = "strings-utf8"
default_encoding = "utf-8"
class propunit(base.TranslationUnit):
"""An element of a properties file i.e. a name and value, and any
comments associated."""
def __init__(self, source="", personality="java"):
"""Construct a blank propunit."""
self.personality = get_dialect(personality)
super(propunit, self).__init__(source)
self.name = u""
self.value = u""
self.translation = u""
self.delimiter = u"="
self.comments = []
self.source = source
# a pair of symbols to enclose delimiter on the output
# (a " " can be used for the sake of convenience)
self.out_delimiter_wrappers = getattr(self.personality,
'out_delimiter_wrappers', u'')
# symbol that should end every property sentence
# (e.g. ";" is required for Mac OS X strings)
self.out_ending = getattr(self.personality, 'out_ending', u'')
def getsource(self):
value = quote.propertiesdecode(self.value)
return value
def setsource(self, source):
self._rich_source = None
source = data.forceunicode(source)
self.value = self.personality.encode(source or u"", self.encoding)
source = property(getsource, setsource)
def gettarget(self):
translation = quote.propertiesdecode(self.translation)
translation = re.sub(u"\\\\ ", u" ", translation)
return translation
def settarget(self, target):
self._rich_target = None
target = data.forceunicode(target)
self.translation = self.personality.encode(target or u"",
self.encoding)
target = property(gettarget, settarget)
@property
def encoding(self):
if self._store:
return self._store.encoding
else:
return self.personality.default_encoding
def __str__(self):
"""Convert to a string. Double check that unicode is handled
somehow here."""
source = self.getoutput()
assert isinstance(source, unicode)
return source.encode(self.encoding)
def getoutput(self):
"""Convert the element back into formatted lines for a
.properties file"""
notes = self.getnotes()
if notes:
notes += u"\n"
if self.isblank():
return notes + u"\n"
else:
self.value = self.personality.encode(self.source, self.encoding)
self.translation = self.personality.encode(self.target,
self.encoding)
# encode key, if needed
key = self.name
kwc = self.personality.key_wrap_char
if kwc:
key = key.replace(kwc, '\\%s' % kwc)
key = '%s%s%s' % (kwc, key, kwc)
# encode value, if needed
value = self.translation or self.value
vwc = self.personality.value_wrap_char
if vwc:
value = value.replace(vwc, '\\%s' % vwc)
value = '%s%s%s' % (vwc, value, vwc)
wrappers = self.out_delimiter_wrappers
delimiter = '%s%s%s' % (wrappers, self.delimiter, wrappers)
ending = self.out_ending
out_dict = {
"notes": notes,
"key": key,
"del": delimiter,
"value": value,
"ending": ending,
}
return u"%(notes)s%(key)s%(del)s%(value)s%(ending)s\n" % out_dict
def getlocations(self):
return [self.name]
def addnote(self, text, origin=None, position="append"):
if origin in ['programmer', 'developer', 'source code', None]:
text = data.forceunicode(text)
self.comments.append(text)
else:
return super(propunit, self).addnote(text, origin=origin,
position=position)
def getnotes(self, origin=None):
if origin in ['programmer', 'developer', 'source code', None]:
return u'\n'.join(self.comments)
else:
return super(propunit, self).getnotes(origin)
def removenotes(self):
self.comments = []
def isblank(self):
"""returns whether this is a blank element, containing only
comments."""
return not (self.name or self.value)
def istranslatable(self):
return bool(self.name)
def getid(self):
return self.name
def setid(self, value):
self.name = value
class propfile(base.TranslationStore):
"""this class represents a .properties file, made up of propunits"""
UnitClass = propunit
def __init__(self, inputfile=None, personality="java", encoding=None):
"""construct a propfile, optionally reading in from inputfile"""
super(propfile, self).__init__(unitclass=self.UnitClass)
self.personality = get_dialect(personality)
self.encoding = encoding or self.personality.default_encoding
self.filename = getattr(inputfile, 'name', '')
if inputfile is not None:
propsrc = inputfile.read()
inputfile.close()
self.parse(propsrc)
self.makeindex()
def parse(self, propsrc):
"""Read the source of a properties file in and include them
as units."""
text, encoding = self.detect_encoding(propsrc,
default_encodings=[self.personality.default_encoding, 'utf-8',
'utf-16'])
if not text:
raise IOError("Cannot detect encoding for %s." % (self.filename or
"given string"))
self.encoding = encoding
propsrc = text
newunit = propunit("", self.personality.name)
inmultilinevalue = False
inmultilinecomment = False
for line in propsrc.split(u"\n"):
# handle multiline value if we're in one
line = quote.rstripeol(line)
if inmultilinevalue:
newunit.value += line.lstrip()
# see if there's more
inmultilinevalue = is_line_continuation(newunit.value)
# if we're still waiting for more...
if inmultilinevalue:
# strip the backslash
newunit.value = newunit.value[:-1]
if not inmultilinevalue:
# we're finished, add it to the list...
self.addunit(newunit)
newunit = propunit("", self.personality.name)
# otherwise, this could be a comment
# FIXME handle // inline comments
elif (inmultilinecomment or is_comment_one_line(line) or
is_comment_start(line) or is_comment_end(line)):
# add a comment
if line not in self.personality.drop_comments:
newunit.comments.append(line)
if is_comment_start(line):
inmultilinecomment = True
elif is_comment_end(line):
inmultilinecomment = False
elif not line.strip():
# this is a blank line...
if str(newunit).strip():
self.addunit(newunit)
newunit = propunit("", self.personality.name)
else:
newunit.delimiter, delimiter_pos = self.personality.find_delimiter(line)
if delimiter_pos == -1:
newunit.name = self.personality.key_strip(line)
newunit.value = u""
self.addunit(newunit)
newunit = propunit("", self.personality.name)
else:
newunit.name = self.personality.key_strip(line[:delimiter_pos])
if is_line_continuation(line[delimiter_pos+1:].lstrip()):
inmultilinevalue = True
newunit.value = line[delimiter_pos+1:].lstrip()[:-1]
else:
newunit.value = self.personality.value_strip(line[delimiter_pos+1:])
self.addunit(newunit)
newunit = propunit("", self.personality.name)
# see if there is a leftover one...
if inmultilinevalue or len(newunit.comments) > 0:
self.addunit(newunit)
def __str__(self):
"""Convert the units back to lines."""
lines = []
for unit in self.units:
lines.append(unit.getoutput())
uret = u"".join(lines)
return uret.encode(self.encoding)
class javafile(propfile):
Name = "Java Properties"
Extensions = ['properties']
def __init__(self, *args, **kwargs):
kwargs['personality'] = "java"
kwargs['encoding'] = "auto"
super(javafile, self).__init__(*args, **kwargs)
class javautf8file(propfile):
Name = "Java Properties (UTF-8)"
Extensions = ['properties']
def __init__(self, *args, **kwargs):
kwargs['personality'] = "java-utf8"
kwargs['encoding'] = "utf-8"
super(javautf8file, self).__init__(*args, **kwargs)
class stringsfile(propfile):
Name = "OS X Strings"
Extensions = ['strings']
def __init__(self, *args, **kwargs):
kwargs['personality'] = "strings"
super(stringsfile, self).__init__(*args, **kwargs)
class stringsutf8file(propfile):
Name = "OS X Strings (UTF-8)"
Extensions = ['strings']
def __init__(self, *args, **kwargs):
kwargs['personality'] = "strings-utf8"
kwargs['encoding'] = "utf-8"
super(stringsutf8file, self).__init__(*args, **kwargs)
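# --- Editor's demo (not part of the upstream module): a minimal, hedged
# round-trip of a Java .properties snippet. Guarded so importing this module
# is unaffected; written in the module's own Python 2 idiom.
if __name__ == "__main__":
    from StringIO import StringIO
    demo = StringIO("# greeting\nhello = world\n")
    store = propfile(demo, personality="java")
    for unit in store.units:
        print "%s -> %s" % (unit.name, unit.source)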
|
Lujeni/ansible | refs/heads/devel | lib/ansible/modules/cloud/vultr/vultr_firewall_group_info.py | 14 | #!/usr/bin/python
#
# (c) 2018, Yanis Guenane <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: vultr_firewall_group_info
short_description: Gather information about the Vultr firewall groups available.
description:
- Gather information about firewall groups available in Vultr.
version_added: "2.9"
author: "Yanis Guenane (@Spredzy)"
extends_documentation_fragment: vultr
'''
EXAMPLES = r'''
- name: Gather Vultr firewall groups information
local_action:
module: vultr_firewall_group_info
register: result
- name: Print the gathered information
debug:
var: result.vultr_firewall_group_info
'''
RETURN = r'''
---
vultr_api:
description: Response from Vultr API with a few additions/modification
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_retry_max_delay:
description: Exponential backoff delay in seconds between retries up to this max delay value.
returned: success
type: int
sample: 12
version_added: '2.9'
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_firewall_group_info:
description: Response from Vultr API
returned: success
type: complex
sample:
"vultr_firewall_group_info": [
{
"date_created": "2018-07-12 10:27:14",
"date_modified": "2018-07-12 10:27:14",
"description": "test",
"id": "5e128ff0",
"instance_count": 0,
"max_rule_count": 50,
"rule_count": 0
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
class AnsibleVultrFirewallGroupInfo(Vultr):
def __init__(self, module):
super(AnsibleVultrFirewallGroupInfo, self).__init__(module, "vultr_firewall_group_info")
self.returns = {
"FIREWALLGROUPID": dict(key='id'),
"date_created": dict(),
"date_modified": dict(),
"description": dict(),
"instance_count": dict(convert_to='int'),
"max_rule_count": dict(convert_to='int'),
"rule_count": dict(convert_to='int')
}
def get_firewall_group(self):
return self.api_query(path="/v1/firewall/group_list")
def parse_fw_group_list(fwgroups_list):
    if not fwgroups_list:
        return []
    return list(fwgroups_list.values())
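# Editor's note: an illustrative transformation (values hypothetical) -- the
# raw API response is keyed by FIREWALLGROUPID, and this helper flattens it:
#   parse_fw_group_list({'5e128ff0': {'description': 'test'}})
#   -> [{'description': 'test'}]
# An empty or None response yields [].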
def main():
argument_spec = vultr_argument_spec()
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
fw_group_info = AnsibleVultrFirewallGroupInfo(module)
result = fw_group_info.get_result(parse_fw_group_list(fw_group_info.get_firewall_group()))
module.exit_json(**result)
if __name__ == '__main__':
main()
|