repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (15 classes)
---|---|---|---|---|---|
cysnake4713/wechatpy
|
wechatpy/client/api/material.py
|
3
|
6144
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import requests
from wechatpy._compat import json
from wechatpy.client.api.base import BaseWeChatAPI
class WeChatMaterial(BaseWeChatAPI):
def add_articles(self, articles):
"""
Add a permanent news (article) material.
See:
http://mp.weixin.qq.com/wiki/14/7e6c03263063f4813141c3e17dd4350a.html
:param articles: list of article dicts
:return: the JSON response returned by the API
"""
articles_data = []
for article in articles:
articles_data.append({
'thumb_media_id': article['thumb_media_id'],
'title': article['title'],
'content': article['content'],
'author': article.get('author', ''),
'content_source_url': article.get('content_source_url', ''),
'digest': article.get('digest', ''),
'show_cover_pic': article.get('show_cover_pic', 0)
})
return self._post(
'material/add_news',
data={
'articles': articles_data
}
)
def add(self, media_type, media_file, title=None, introduction=None):
"""
Add a permanent material of another type.
See:
http://mp.weixin.qq.com/wiki/14/7e6c03263063f4813141c3e17dd4350a.html
:param media_type: media type, one of image, voice, video or thumb
:param media_file: the file to upload, a file-like object
:param title: video title, required only when uploading a video
:param introduction: video description, required only when uploading a video
:return: the JSON response returned by the API
"""
params = {
'access_token': self.access_token,
'type': media_type
}
if media_type == 'video':
assert title, 'Video title must be set'
assert introduction, 'Video introduction must be set'
description = {
'title': title,
'introduction': introduction
}
params['description'] = json.dumps(description)
return self._post(
url='http://file.api.weixin.qq.com/cgi-bin/material/add_material',
params=params,
files={
'media': media_file
}
)
def get(self, media_id):
"""
Fetch a permanent material.
See:
http://mp.weixin.qq.com/wiki/4/b3546879f07623cb30df9ca0e420a5d0.html
:param media_id: the material's media_id
:return: for news materials, the list of articles; for other types, the raw response
"""
res = requests.post(
url='https://api.weixin.qq.com/cgi-bin/material/get_material',
params={
'access_token': self.access_token
},
data={
'media_id': media_id
}
)
content_type = res.headers['Content-Type'].lower()
if content_type in ('application/json',
'application/javascript',
'application/x-javascript',
'text/javascript',
'text/json'):
# news item return
return res.json().get('news_item', [])
return res
def delete(self, media_id):
"""
Delete a permanent material.
See:
http://mp.weixin.qq.com/wiki/5/e66f61c303db51a6c0f90f46b15af5f5.html
:param media_id: the material's media_id
:return: the JSON response returned by the API
"""
return self._post(
'material/del_material',
data={
'media_id': media_id
}
)
def update_articles(self, media_id, index, articles):
"""
Update a permanent news material.
See:
http://mp.weixin.qq.com/wiki/4/19a59cba020d506e767360ca1be29450.html
:param media_id: media_id of the news message to update
:param index: position of the article to update within the news message
(only meaningful for multi-article messages); the first article is 0
:param articles: list of article dicts
:return: the JSON response returned by the API
"""
articles_data = []
for article in articles:
articles_data.append({
'thumb_media_id': article['thumb_media_id'],
'title': article['title'],
'content': article['content'],
'author': article.get('author', ''),
'content_source_url': article.get('content_source_url', ''),
'digest': article.get('digest', ''),
'show_cover_pic': article.get('show_cover_pic', 0)
})
return self._post(
'material/update_news',
data={
'media_id': media_id,
'index': index,
'articles': articles_data
}
)
def batchget(self, media_type, offset=0, count=20):
"""
Batch-fetch the list of permanent materials.
See:
http://mp.weixin.qq.com/wiki/12/2108cd7aafff7f388f41f37efa710204.html
:param media_type: media type, one of image, voice, video or news
:param offset: offset into the full material list; 0 starts from the first material
:param count: number of materials to return, between 1 and 20
:return: the JSON response returned by the API
"""
return self._post(
'material/batchget_material',
data={
'type': media_type,
'offset': offset,
'count': count
}
)
def get_count(self):
"""
Get the total counts of materials.
See:
http://mp.weixin.qq.com/wiki/16/8cc64f8c189674b421bee3ed403993b8.html
:return: the JSON response returned by the API
"""
return self._get('material/get_materialcount')
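# Usage sketch (hypothetical wiring): this assumes wechatpy's WeChatClient
# exposes this API as ``client.material``; the appid/secret and all media ids
# below are placeholders, not real values.
#
# from wechatpy import WeChatClient
#
# client = WeChatClient('APPID', 'SECRET')
# res = client.material.add_articles([{
#     'thumb_media_id': 'THUMB_MEDIA_ID',  # from a prior thumb upload
#     'title': 'Hello',
#     'content': '<p>article body</p>',
# }])
# news = client.material.get(res['media_id'])          # list of article dicts
# page = client.material.batchget('news', offset=0, count=20)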
|
mit
|
RebuiltBits/django-paypal
|
paypal/pro/helpers.py
|
4
|
13557
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import logging
import pprint
import time
import requests
from django.conf import settings
from django.forms.models import fields_for_model
from django.http import QueryDict
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.http import urlencode
from paypal.pro.signals import payment_was_successful, recurring_cancel, recurring_suspend, recurring_reactivate, payment_profile_created
from paypal.pro.models import PayPalNVP
from paypal.pro.exceptions import PayPalFailure
USER = settings.PAYPAL_WPP_USER
PASSWORD = settings.PAYPAL_WPP_PASSWORD
SIGNATURE = settings.PAYPAL_WPP_SIGNATURE
VERSION = 116.0
BASE_PARAMS = dict(USER=USER, PWD=PASSWORD, SIGNATURE=SIGNATURE, VERSION=VERSION)
ENDPOINT = "https://api-3t.paypal.com/nvp"
SANDBOX_ENDPOINT = "https://api-3t.sandbox.paypal.com/nvp"
EXPRESS_ENDPOINT = "https://www.paypal.com/webscr?cmd=_express-checkout&%s"
SANDBOX_EXPRESS_ENDPOINT = "https://www.sandbox.paypal.com/webscr?cmd=_express-checkout&%s"
log = logging.getLogger(__file__)
def paypal_time(time_obj=None):
"""Returns a time suitable for PayPal time fields."""
if time_obj is None:
time_obj = time.gmtime()
return time.strftime(PayPalNVP.TIMESTAMP_FORMAT, time_obj)
def paypaltime2datetime(s):
"""Convert a PayPal time string to a DateTime."""
naive = datetime.datetime.strptime(s, PayPalNVP.TIMESTAMP_FORMAT)
if not settings.USE_TZ:
return naive
else:
# TIMESTAMP_FORMAT is UTC
return timezone.make_aware(naive, timezone.UTC())
class PayPalError(TypeError):
"""Error thrown when something is wrong."""
def express_endpoint():
if getattr(settings, 'PAYPAL_TEST', True):
return SANDBOX_EXPRESS_ENDPOINT
else:
return EXPRESS_ENDPOINT
def express_endpoint_for_token(token, commit=False):
"""
Returns the PayPal Express Checkout endpoint for a token.
Pass 'commit=True' if you will not prompt for confirmation when the user
returns to your site.
"""
pp_params = dict(token=token)
if commit:
pp_params['useraction'] = 'commit'
return express_endpoint() % urlencode(pp_params)
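# Example (sketch): after SetExpressCheckout returns a token, redirect the
# buyer to PayPal. The token below is a placeholder; with PAYPAL_TEST left at
# its default of True the sandbox endpoint is used, roughly:
#
# express_endpoint_for_token('EC-XXXXXXXXXXXXXXXXX', commit=True)
# -> 'https://www.sandbox.paypal.com/webscr?cmd=_express-checkout&token=EC-XXXXXXXXXXXXXXXXX&useraction=commit'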
class PayPalWPP(object):
"""
Wrapper class for the PayPal Website Payments Pro.
Website Payments Pro Integration Guide:
https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_WPP_IntegrationGuide.pdf
Name-Value Pair API Developer Guide and Reference:
https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_NVPAPI_DeveloperGuide.pdf
"""
def __init__(self, request=None, params=BASE_PARAMS):
"""Required - USER / PWD / SIGNATURE / VERSION"""
self.request = request
if getattr(settings, 'PAYPAL_TEST', True):
self.endpoint = SANDBOX_ENDPOINT
else:
self.endpoint = ENDPOINT
self.signature_values = params
self.signature = urlencode(self.signature_values) + "&"
@cached_property
def NVP_FIELDS(self):
# Put this onto class and load lazily, because in some cases there is an
# import order problem if we put it at module level.
return list(fields_for_model(PayPalNVP).keys())
def doDirectPayment(self, params):
"""Call PayPal DoDirectPayment method."""
defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"}
required = ["creditcardtype",
"acct",
"expdate",
"cvv2",
"ipaddress",
"firstname",
"lastname",
"street",
"city",
"state",
"countrycode",
"zip",
"amt",
]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
payment_was_successful.send(sender=nvp_obj, **params)
# @@@ Could check cvv2match / avscode are both 'X' or '0'
# qd = django.http.QueryDict(nvp_obj.response)
# if qd.get('cvv2match') not in ['X', '0']:
# nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match')
# if qd.get('avscode') not in ['X', '0']:
# nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode')
return nvp_obj
def setExpressCheckout(self, params):
"""
Initiates an Express Checkout transaction.
Optionally, the SetExpressCheckout API operation can set up billing agreements for
reference transactions and recurring payments.
Returns an NVP instance; check for token and payerid to continue.
"""
if "amt" in params:
import warnings
warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' "
"should be used instead.", DeprecationWarning)
# Make a copy so we don't change things unexpectedly
params = params.copy()
params.update({'paymentrequest_0_amt': params['amt']})
del params['amt']
if self._is_recurring(params):
params = self._recurring_setExpressCheckout_adapter(params)
defaults = {"method": "SetExpressCheckout", "noshipping": 1}
required = ["returnurl", "cancelurl", "paymentrequest_0_amt"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def doExpressCheckoutPayment(self, params):
"""
Completes the Express Checkout payment for a token/payerid pair obtained
from the SetExpressCheckout flow.
"""
if "amt" in params:
import warnings
warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' "
"should be used instead.", DeprecationWarning)
# Make a copy so we don't change things unexpectedly
params = params.copy()
params.update({'paymentrequest_0_amt': params['amt']})
del params['amt']
defaults = {"method": "DoExpressCheckoutPayment", "paymentaction": "Sale"}
required = ["paymentrequest_0_amt", "token", "payerid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
payment_was_successful.send(sender=nvp_obj, **params)
return nvp_obj
def createRecurringPaymentsProfile(self, params, direct=False):
"""
Set direct to True to indicate that this is being called as a directPayment.
Returns True if PayPal successfully creates the profile, otherwise False.
"""
defaults = {"method": "CreateRecurringPaymentsProfile"}
required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"]
# Direct payments require CC data
if direct:
required += ["creditcardtype", "acct", "expdate", "firstname", "lastname"]
else:
required += ["token", "payerid"]
nvp_obj = self._fetch(params, required, defaults)
# Flag if profile_type != ActiveProfile
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
payment_profile_created.send(sender=nvp_obj, **params)
return nvp_obj
def getExpressCheckoutDetails(self, params):
defaults = {"method": "GetExpressCheckoutDetails"}
required = ["token"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def setCustomerBillingAgreement(self, params):
raise DeprecationWarning
def createBillingAgreement(self, params):
"""
Create a billing agreement for future use, without any initial payment
"""
defaults = {"method": "CreateBillingAgreement"}
required = ["token"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def getTransactionDetails(self, params):
defaults = {"method": "GetTransactionDetails"}
required = ["transactionid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def massPay(self, params):
raise NotImplementedError
def getRecurringPaymentsProfileDetails(self, params):
raise NotImplementedError
def updateRecurringPaymentsProfile(self, params):
defaults = {"method": "UpdateRecurringPaymentsProfile"}
required = ["profileid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def billOutstandingAmount(self, params):
raise NotImplementedError
def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False):
"""
Requires `profileid` and `action` params.
Action must be either "Cancel", "Suspend", or "Reactivate".
"""
defaults = {"method": "ManageRecurringPaymentsProfileStatus"}
required = ["profileid", "action"]
nvp_obj = self._fetch(params, required, defaults)
# TODO: This fail-silently check should use the error code, but it's not easy to access
if not nvp_obj.flag or (
fail_silently and nvp_obj.flag_info == 'Invalid profile status for cancel action; profile should be active or suspended'):
if params['action'] == 'Cancel':
recurring_cancel.send(sender=nvp_obj)
elif params['action'] == 'Suspend':
recurring_suspend.send(sender=nvp_obj)
elif params['action'] == 'Reactivate':
recurring_reactivate.send(sender=nvp_obj)
else:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def refundTransaction(self, params):
raise NotImplementedError
def doReferenceTransaction(self, params):
"""
Process a payment from a buyer's account, identified by a previous
transaction.
The `paymentaction` param defaults to "Sale", but may also contain the
values "Authorization" or "Order".
"""
defaults = {"method": "DoReferenceTransaction",
"paymentaction": "Sale"}
required = ["referenceid", "amt"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
return nvp_obj
def _is_recurring(self, params):
"""Returns True if the item passed is a recurring transaction."""
return 'billingfrequency' in params
def _recurring_setExpressCheckout_adapter(self, params):
"""
The recurring payment interface to SEC is different than the recurring payment
interface to ECP. This adapts a normal call to look like a SEC call.
"""
params['l_billingtype0'] = "RecurringPayments"
params['l_billingagreementdescription0'] = params['desc']
REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"]
# iterate over a snapshot of the keys so entries can be deleted safely
for k in list(params.keys()):
if k in REMOVE:
del params[k]
return params
def _fetch(self, params, required, defaults):
"""Make the NVP request and store the response."""
defaults.update(params)
pp_params = self._check_and_update_params(required, defaults)
pp_string = self.signature + urlencode(pp_params)
response = self._request(pp_string)
response_params = self._parse_response(response)
if getattr(settings, 'PAYPAL_DEBUG', settings.DEBUG):
log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults))
log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params))
# Gather all NVP parameters to pass to a new instance.
nvp_params = {}
tmpd = defaults.copy()
tmpd.update(response_params)
for k, v in tmpd.items():
if k in self.NVP_FIELDS:
nvp_params[str(k)] = v
# PayPal timestamp has to be formatted.
if 'timestamp' in nvp_params:
nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp'])
nvp_obj = PayPalNVP(**nvp_params)
nvp_obj.init(self.request, params, response_params)
nvp_obj.save()
return nvp_obj
def _request(self, data):
"""Moved out to make testing easier."""
return requests.post(self.endpoint, data=data.encode("ascii")).content
def _check_and_update_params(self, required, params):
"""
Ensure all required parameters were passed to the API call and format
them correctly.
"""
for r in required:
if r not in params:
raise PayPalError("Missing required param: %s" % r)
# Upper case all the parameters for PayPal.
return (dict((k.upper(), v) for k, v in params.items()))
def _parse_response(self, response):
"""Turn the PayPal response into a dict"""
q = QueryDict(response, encoding='UTF-8').dict()
return {k.lower(): v for k,v in q.items()}
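# End-to-end sketch (assumes PAYPAL_WPP_USER / PASSWORD / SIGNATURE are
# configured and PAYPAL_TEST is left at its default True, so the sandbox
# endpoint is used; the URLs and amount are placeholders):
#
# wpp = PayPalWPP(request)
# try:
#     nvp = wpp.setExpressCheckout({
#         'paymentrequest_0_amt': '10.00',
#         'returnurl': 'https://example.com/return/',
#         'cancelurl': 'https://example.com/cancel/',
#     })
# except PayPalFailure as exc:
#     ...  # exc.nvp holds the flagged PayPalNVP record
# else:
#     redirect_url = express_endpoint_for_token(nvp.token)  # assumes PayPalNVP stores the token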
|
mit
|
wangyao1052/PyLitMath
|
LitMath/Quaternion.py
|
1
|
7864
|
import math
import Util
class Quaternion(object):
__slots__ = ['x', 'y', 'z', 'w']
__hash__ = None
def __init__(self, x=0.0, y=0.0, z=0.0, w=1.0):
self.x = float(x)
self.y = float(y)
self.z = float(z)
self.w = float(w)
def set(self, x, y, z, w):
self.x = float(x)
self.y = float(y)
self.z = float(z)
self.w = float(w)
return self
def copy(self):
import copy
return copy.copy(self)
def __repr__(self):
return 'Quaternion( %.2f, %.2f, %.2f, %.2f )' % \
(self.x, self.y, self.z, self.w)
def __eq__(self, other):
if isinstance(other, Quaternion):
return Util.isEqual(self.x, other.x) and \
Util.isEqual(self.y, other.y) and \
Util.isEqual(self.z, other.z) and \
Util.isEqual(self.w, other.w)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
@property
def magnitude(self):
return math.sqrt(self.x ** 2 +
self.y ** 2 +
self.z ** 2 +
self.w ** 2)
def normalize(self):
length = self.magnitude
if length != 0:
self.x /= length
self.y /= length
self.z /= length
self.w /= length
return self
@property
def normalized(self):
length = self.magnitude
if length == 0:
return self.copy()
else:
return Quaternion(self.x / length, self.y / length, self.z / length, self.w / length)
def invert(self):
self.x = -self.x
self.y = -self.y
self.z = -self.z
return self
@property
def inverse(self):
return Quaternion(-self.x, -self.y, -self.z, self.w)
def __mul__(self, other):
'''Multiplies two quaternions.'''
assert isinstance(other, Quaternion)
return Quaternion(
self.w * other.x + self.x * other.w + self.y * other.z - self.z * other.y,
self.w * other.y - self.x * other.z + self.y * other.w + self.z * other.x,
self.w * other.z + self.x * other.y - self.y * other.x + self.z * other.w,
self.w * other.w - self.x * other.x - self.y * other.y - self.z * other.z)
def multiplyPoint(self, pnt):
'''Rotates the point pnt by this quaternion.'''
import Vector3
assert isinstance(pnt, Vector3.Vector3)
x = self.x
y = self.y
z = self.z
w = self.w
x2 = self.x * self.x
y2 = self.y * self.y
z2 = self.z * self.z
w2 = self.w * self.w
dx = (x2+w2-y2-z2)*pnt.x + 2.0*(x*y-z*w)*pnt.y + 2.0*(x*z+y*w)*pnt.z
dy = 2.0*(x*y+z*w)*pnt.x + (w2-x2+y2-z2)*pnt.y + 2.0*(y*z-x*w)*pnt.z
dz = 2.0*(x*z-y*w)*pnt.x + 2.0*(x*w+y*z)*pnt.y + (w2-x2-y2+z2)*pnt.z
return Vector3.Vector3(dx, dy, dz)
def toMatrix4(self):
'''Converts a rotation to 4x4 matrix.'''
# reference:FreeCAD Rotation.cpp
x = self.x
y = self.y
z = self.z
w = self.w
import Matrix4
matrix = Matrix4.Matrix4()
matrix.m11 = 1.0-2.0*(y*y+z*z)
matrix.m12 = 2.0*(x*y-z*w)
matrix.m13 = 2.0*(x*z+y*w)
matrix.m14 = 0.0
matrix.m21 = 2.0*(x*y+z*w)
matrix.m22 = 1.0-2.0*(x*x+z*z)
matrix.m23 = 2.0*(y*z-x*w)
matrix.m24 = 0.0
matrix.m31 = 2.0*(x*z-y*w)
matrix.m32 = 2.0*(y*z+x*w)
matrix.m33 = 1.0-2.0*(x*x+y*y)
matrix.m34 = 0.0
matrix.m41 = 0.0
matrix.m42 = 0.0
matrix.m43 = 0.0
matrix.m44 = 1.0
return matrix
def toAxisAngle(self):
'''Converts the rotation to axis-angle representation (angle in degrees).'''
axis, angle = self.toAxisAngleInRadian()
return axis, Util.radianToDegree(angle)
def toAxisAngleInRadian(self):
'''Converts the rotation to axis-angle representation (angle in radians).'''
import Vector3
# reference:FreeCAD Rotation.cpp
if self.w > -1.0 and self.w < 1.0:
t = math.acos(self.w)
scale = math.sin(t)
if Util.isEqualZero(scale):
return Vector3.Vector3(0,0,1), 0.0
else:
axis = Vector3.Vector3(self.x / scale, self.y / scale, self.z / scale)
return axis, 2*t
else:
return Vector3.Vector3(0,0,1), 0.0
def setIdentity(self):
self.set(0.0, 0.0, 0.0, 1.0)
return self
@staticmethod
def identity():
'''Returns the identity quaternion.'''
return Quaternion(0.0, 0.0, 0.0, 1.0)
@staticmethod
def matrix4(matrix):
import Matrix4
assert isinstance(matrix, Matrix4.Matrix4)
quat = Quaternion()
M = matrix
trace = M.m11 + M.m22 + M.m33
if trace > 0:
s = 0.5 / math.sqrt(trace + 1.0)
quat.w = 0.25 / s
quat.x = (M.m32 - M.m23 ) * s
quat.y = (M.m13 - M.m31 ) * s
quat.z = (M.m21 - M.m12 ) * s
elif M.m11 > M.m22 and M.m11 > M.m33:
s = 2.0 * math.sqrt(1.0 + M.m11 - M.m22 - M.m33)
quat.w = (M.m32 - M.m23) / s
quat.x = 0.25 * s
quat.y = (M.m12 + M.m21) / s
quat.z = (M.m13 + M.m31) / s
elif M.m22 > M.m33:
s = 2.0 * math.sqrt(1.0 + M.m22 - M.m11 - M.m33)
quat.w = (M.m13 - M.m31) / s
quat.x = (M.m12 + M.m21) / s
quat.y = 0.25 * s
quat.z = (M.m23 + M.m32) / s
else:
s = 2.0 * math.sqrt(1.0 + M.m33 - M.m11 - M.m22)
quat.w = (M.m21 - M.m12) / s
quat.x = (M.m13 + M.m31) / s
quat.y = (M.m23 + M.m32) / s
quat.z = 0.25 * s
return quat
@staticmethod
def axisAngle(axis, angle):
'''Creates a rotation which rotates angle degrees around axis.'''
return Quaternion.axisAngleInRadian(axis, Util.degreeToRadian(angle))
@staticmethod
def axisAngleInRadian(axis, angle):
'''Creates a rotation which rotates angle radians around axis.'''
import Vector3
assert isinstance(axis, Vector3.Vector3) and \
type(angle) in (int, long, float)
axis = axis.normalized
scale = math.sin(angle / 2)
quat = Quaternion()
quat.w = math.cos(angle / 2)
quat.x = axis.x * scale
quat.y = axis.y * scale
quat.z = axis.z * scale
return quat
@staticmethod
def fromToRotation(f, to):
'''Creates a rotation which rotates from one vector (f) to another (to).'''
from Vector3 import Vector3
assert isinstance(f, Vector3) and isinstance(to, Vector3)
# reference:FreeCAD Rotation.cpp
u = f.normalized
v = to.normalized
dot = Vector3.dot(u, v)
w = Vector3.cross(u, v)
# parallel vectors
if w.length == 0:
# same direction
if dot >= 0:
return Quaternion(0.0, 0.0, 0.0, 1.0)
else:
t = Vector3.cross(u, Vector3(1.0, 0.0, 0.0))
if Util.isEqualZero(t.length):
t = Vector3.cross(u, Vector3(0.0, 1.0, 0.0))
return Quaternion(t.x, t.y, t.z, 0.0)
else:
angleInRad = math.acos(dot)
return Quaternion.axisAngleInRadian(w, angleInRad)
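if __name__ == '__main__':
    # Demonstration sketch (assumes the sibling Vector3 module of this package
    # is importable): rotating the X unit vector 90 degrees around Z should
    # give (approximately) the Y unit vector.
    import Vector3
    q = Quaternion.axisAngle(Vector3.Vector3(0.0, 0.0, 1.0), 90.0)
    print(q.multiplyPoint(Vector3.Vector3(1.0, 0.0, 0.0)))  # ~ (0, 1, 0)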
|
mit
|
jumpstarter-io/cinder
|
cinder/flow_utils.py
|
4
|
5897
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging as base_logging
# For more information please visit: https://wiki.openstack.org/wiki/TaskFlow
from taskflow.listeners import base as base_listener
from taskflow import states
from taskflow import task
from taskflow.utils import misc
from cinder.i18n import _
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def _make_task_name(cls, addons=None):
"""Makes a pretty name for a task class."""
base_name = ".".join([cls.__module__, cls.__name__])
extra = ''
if addons:
extra = ';%s' % (", ".join([str(a) for a in addons]))
return base_name + extra
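# For example (hypothetical class): a task class ``CreateVolumeTask`` defined
# in module ``cinder.volume.flows`` with addons=['retry'] is named
# 'cinder.volume.flows.CreateVolumeTask;retry'.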
class CinderTask(task.Task):
"""The root task class for all cinder tasks.
It automatically names the given task using the module and class that
implement the given task as the task name.
"""
def __init__(self, addons=None, **kwargs):
super(CinderTask, self).__init__(_make_task_name(self.__class__,
addons),
**kwargs)
class DynamicLogListener(base_listener.ListenerBase):
"""This is used to attach to taskflow engines while they are running.
It provides a bunch of useful features that expose the actions happening
inside a taskflow engine, which can be useful for developers for debugging,
for operations folks for monitoring and tracking of the resource actions
and more...
"""
def __init__(self, engine,
task_listen_for=(misc.Notifier.ANY,),
flow_listen_for=(misc.Notifier.ANY,),
logger=None):
super(DynamicLogListener, self).__init__(
engine,
task_listen_for=task_listen_for,
flow_listen_for=flow_listen_for)
if logger is None:
self._logger = LOG
else:
self._logger = logger
def _flow_receiver(self, state, details):
# Gets called on flow state changes.
level = base_logging.DEBUG
if state in (states.FAILURE, states.REVERTED):
level = base_logging.WARNING
self._logger.log(level,
_("Flow '%(flow_name)s' (%(flow_uuid)s) transitioned"
" into state '%(state)s' from state"
" '%(old_state)s'") %
{'flow_name': details['flow_name'],
'flow_uuid': details['flow_uuid'],
'state': state,
'old_state': details.get('old_state')})
def _task_receiver(self, state, details):
# Gets called on task state changes.
if 'result' in details and state in base_listener.FINISH_STATES:
# If the task failed, it's useful to show the exception traceback
# and any other available exception information.
result = details.get('result')
if isinstance(result, misc.Failure):
self._logger.warn(_("Task '%(task_name)s' (%(task_uuid)s)"
" transitioned into state '%(state)s'") %
{'task_name': details['task_name'],
'task_uuid': details['task_uuid'],
'state': state},
exc_info=tuple(result.exc_info))
else:
# Otherwise, depending on the enabled logging level/state we
# will show or hide results that the task may have produced
# during execution.
level = base_logging.DEBUG
if state == states.FAILURE:
level = base_logging.WARNING
if (self._logger.isEnabledFor(base_logging.DEBUG) or
state == states.FAILURE):
self._logger.log(level,
_("Task '%(task_name)s' (%(task_uuid)s)"
" transitioned into state '%(state)s'"
" with result '%(result)s'") %
{'task_name': details['task_name'],
'task_uuid': details['task_uuid'],
'state': state, 'result': result})
else:
self._logger.log(level,
_("Task '%(task_name)s' (%(task_uuid)s)"
" transitioned into state"
" '%(state)s'") %
{'task_name': details['task_name'],
'task_uuid': details['task_uuid'],
'state': state})
else:
level = base_logging.DEBUG
if state in (states.REVERTING, states.RETRYING):
level = base_logging.WARNING
self._logger.log(level,
_("Task '%(task_name)s' (%(task_uuid)s)"
" transitioned into state '%(state)s'") %
{'task_name': details['task_name'],
'task_uuid': details['task_uuid'],
'state': state})
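# Attachment sketch (hypothetical flow and task; assumes taskflow listeners
# act as context managers that register on entry and deregister on exit):
#
# from taskflow import engines
# from taskflow.patterns import linear_flow
#
# flow = linear_flow.Flow('volume_create').add(SomeCinderTask())
# engine = engines.load(flow)
# with DynamicLogListener(engine):
#     engine.run()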
|
apache-2.0
|
nvoron23/POSTMan-Chrome-Extension
|
tests/selenium/pmtests/postman_tests_header_presets.py
|
104
|
4445
|
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.select import Select
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.chrome.service as service
import inspect
import time
from postman_tests import PostmanTests
class PostmanTestsHeaderPresets(PostmanTests):
def test_1_add_header_preset(self):
self.reset_request()
self.browser.find_element_by_id("headers-keyvaleditor-actions-open").click()
time.sleep(0.1)
self.browser.find_element_by_id("headers-keyvaleditor-actions-manage-presets").click()
time.sleep(1)
self.browser.find_element_by_css_selector("#header-presets-list-wrapper .header-presets-actions-add").click()
self.browser.find_element_by_id("header-presets-editor-name").send_keys("Test preset")
first_key = self.browser.find_element_by_css_selector("#header-presets-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-key")
first_key.clear()
first_key.send_keys("Global Foo")
first_val = self.browser.find_element_by_css_selector("#header-presets-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-value")
first_val.clear()
first_val.send_keys("Global Bar")
second_key = self.browser.find_element_by_css_selector("#header-presets-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-key")
second_key.clear()
second_key.send_keys("Global Foo 1")
second_val = self.browser.find_element_by_css_selector("#header-presets-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-value")
second_val.clear()
second_val.send_keys("Global Bar 2")
self.browser.find_element_by_css_selector(".header-presets-actions-submit").click()
time.sleep(0.1)
presets_list = self.browser.find_element_by_id("header-presets-list")
presets_list_value = self.browser.execute_script("return arguments[0].innerHTML", presets_list)
if presets_list_value.find("Test preset") > 0:
return True
else:
return False
def test_2_edit_header_preset(self):
self.browser.find_element_by_css_selector("#header-presets-list tbody tr:first-child .header-preset-action-edit").click()
self.browser.find_element_by_css_selector("#header-presets-editor-name").clear()
self.browser.find_element_by_css_selector("#header-presets-editor-name").send_keys("Edited preset")
self.browser.find_element_by_css_selector(".header-presets-actions-submit").click()
time.sleep(0.1)
presets_list = self.browser.find_element_by_id("header-presets-list")
presets_list_value = self.browser.execute_script("return arguments[0].innerHTML", presets_list)
if presets_list_value.find("Edited preset") > 0:
return True
else:
return False
def test_3_select_header_preset(self):
close_button = self.browser.find_element_by_css_selector("#modal-header-presets .modal-header .close")
close_button.click()
time.sleep(1)
first_key = self.browser.find_element_by_css_selector("#headers-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-key")
first_key.clear()
first_key.send_keys("Edit")
autocomplete_menus = self.browser.find_elements_by_css_selector(".ui-autocomplete")
for menu in autocomplete_menus:
if menu.text.find("Edited preset") > 0:
return True
return False
def test_4_delete_header_preset(self):
self.reset_request()
self.browser.find_element_by_id("headers-keyvaleditor-actions-manage-presets").click()
time.sleep(1)
delete_button = self.browser.find_element_by_css_selector("#header-presets-list tbody tr:first-child .header-preset-action-delete")
delete_button.click()
header_presets_list = self.browser.find_element_by_id("header-presets-list")
header_presets_list_value = self.browser.execute_script("return arguments[0].innerHTML", header_presets_list)
if header_presets_list_value.find("Edited preset") < 0:
return True
else:
return False
PostmanTestsHeaderPresets().run()
|
apache-2.0
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/site-packages/numpy/distutils/cpuinfo.py
|
173
|
22970
|
#!/usr/bin/env python
"""
cpuinfo
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
__all__ = ['cpu']
import sys, re, types
import os
if sys.version_info[0] >= 3:
from subprocess import getstatusoutput
else:
from commands import getstatusoutput
import warnings
import platform
from numpy.distutils.compat import get_exception
def getoutput(cmd, successful_status=(0,), stacklevel=1):
try:
status, output = getstatusoutput(cmd)
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
return False, output
if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
return True, output
return False, output
def command_info(successful_status=(0,), stacklevel=1, **kw):
info = {}
for key in kw:
ok, output = getoutput(kw[key], successful_status=successful_status,
stacklevel=stacklevel+1)
if ok:
info[key] = output.strip()
return info
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
ok, output = getoutput(cmd, successful_status=successful_status,
stacklevel=stacklevel+1)
if not ok:
return
for line in output.splitlines():
yield line.strip()
def key_value_from_command(cmd, sep, successful_status=(0,),
stacklevel=1):
d = {}
for line in command_by_line(cmd, successful_status=successful_status,
stacklevel=stacklevel+1):
l = [s.strip() for s in line.split(sep, 1)]
if len(l) == 2:
d[l[0]] = l[1]
return d
class CPUInfoBase(object):
"""Holds CPU information and provides methods for requiring
the availability of various CPU features.
"""
def _try_call(self, func):
try:
return func()
except:
pass
def __getattr__(self, name):
if not name.startswith('_'):
if hasattr(self, '_'+name):
attr = getattr(self, '_'+name)
if isinstance(attr, types.MethodType):
return lambda func=self._try_call,attr=attr : func(attr)
else:
return lambda : None
raise AttributeError(name)
def _getNCPUs(self):
return 1
def __get_nbits(self):
abits = platform.architecture()[0]
nbits = re.compile(r'(\d+)bit').search(abits).group(1)
return nbits
def _is_32bit(self):
return self.__get_nbits() == '32'
def _is_64bit(self):
return self.__get_nbits() == '64'
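# Note (behavioral sketch): the __getattr__ above maps every public is_*/has_*
# lookup onto the matching private _is_*/_has_* method, wrapped in _try_call so
# a missing /proc key or parse error yields None instead of raising, e.g.:
#
#     cpu = cpuinfo()    # instantiated at the bottom of this module
#     cpu.is_64bit()     # calls _try_call(self._is_64bit)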
class LinuxCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = [ {} ]
ok, output = getoutput('uname -m')
if ok:
info[0]['uname_m'] = output.strip()
try:
fo = open('/proc/cpuinfo')
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning)
else:
for line in fo:
name_value = [s.strip() for s in line.split(':', 1)]
if len(name_value) != 2:
continue
name, value = name_value
if not info or name in info[-1]: # next processor
info.append({})
info[-1][name] = value
fo.close()
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['vendor_id']=='AuthenticAMD'
def _is_AthlonK6_2(self):
return self._is_AMD() and self.info[0]['model'] == '2'
def _is_AthlonK6_3(self):
return self._is_AMD() and self.info[0]['model'] == '3'
def _is_AthlonK6(self):
return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
def _is_AthlonK7(self):
return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
def _is_AthlonMP(self):
return re.match(r'.*?Athlon\(tm\) MP\b',
self.info[0]['model name']) is not None
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['family'] == '15'
def _is_Athlon64(self):
return re.match(r'.*?Athlon\(tm\) 64\b',
self.info[0]['model name']) is not None
def _is_AthlonHX(self):
return re.match(r'.*?Athlon HX\b',
self.info[0]['model name']) is not None
def _is_Opteron(self):
return re.match(r'.*?Opteron\b',
self.info[0]['model name']) is not None
def _is_Hammer(self):
return re.match(r'.*?Hammer\b',
self.info[0]['model name']) is not None
# Alpha
def _is_Alpha(self):
return self.info[0]['cpu']=='Alpha'
def _is_EV4(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
def _is_EV5(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
def _is_EV56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
def _is_PCA56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
# Intel
#XXX
_is_i386 = _not_impl
def _is_Intel(self):
return self.info[0]['vendor_id']=='GenuineIntel'
def _is_i486(self):
return self.info[0]['cpu']=='i486'
def _is_i586(self):
return self.is_Intel() and self.info[0]['cpu family'] == '5'
def _is_i686(self):
return self.is_Intel() and self.info[0]['cpu family'] == '6'
def _is_Celeron(self):
return re.match(r'.*?Celeron',
self.info[0]['model name']) is not None
def _is_Pentium(self):
return re.match(r'.*?Pentium',
self.info[0]['model name']) is not None
def _is_PentiumII(self):
return re.match(r'.*?Pentium.*?II\b',
self.info[0]['model name']) is not None
def _is_PentiumPro(self):
return re.match(r'.*?PentiumPro\b',
self.info[0]['model name']) is not None
def _is_PentiumMMX(self):
return re.match(r'.*?Pentium.*?MMX\b',
self.info[0]['model name']) is not None
def _is_PentiumIII(self):
return re.match(r'.*?Pentium.*?III\b',
self.info[0]['model name']) is not None
def _is_PentiumIV(self):
return re.match(r'.*?Pentium.*?(IV|4)\b',
self.info[0]['model name']) is not None
def _is_PentiumM(self):
return re.match(r'.*?Pentium.*?M\b',
self.info[0]['model name']) is not None
def _is_Prescott(self):
return self.is_PentiumIV() and self.has_sse3()
def _is_Nocona(self):
return self.is_Intel() \
and (self.info[0]['cpu family'] == '6' \
or self.info[0]['cpu family'] == '15' ) \
and (self.has_sse3() and not self.has_ssse3())\
and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None
def _is_Core2(self):
return self.is_64bit() and self.is_Intel() and \
re.match(r'.*?Core\(TM\)2\b', \
self.info[0]['model name']) is not None
def _is_Itanium(self):
return re.match(r'.*?Itanium\b',
self.info[0]['family']) is not None
def _is_XEON(self):
return re.match(r'.*?XEON\b',
self.info[0]['model name'], re.IGNORECASE) is not None
_is_Xeon = _is_XEON
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_fdiv_bug(self):
return self.info[0]['fdiv_bug']=='yes'
def _has_f00f_bug(self):
return self.info[0]['f00f_bug']=='yes'
def _has_mmx(self):
return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None
def _has_sse(self):
return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None
def _has_sse2(self):
return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None
def _has_sse3(self):
return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None
def _has_ssse3(self):
return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None
def _has_3dnow(self):
return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None
def _has_3dnowext(self):
return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
class IRIXCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = key_value_from_command('sysconf', sep=' ',
successful_status=(0, 1))
self.__class__.info = info
def _not_impl(self): pass
def _is_singleCPU(self):
return self.info.get('NUM_PROCESSORS') == '1'
def _getNCPUs(self):
return int(self.info.get('NUM_PROCESSORS', 1))
def __cputype(self, n):
return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
def _is_r2000(self): return self.__cputype(2000)
def _is_r3000(self): return self.__cputype(3000)
def _is_r3900(self): return self.__cputype(3900)
def _is_r4000(self): return self.__cputype(4000)
def _is_r4100(self): return self.__cputype(4100)
def _is_r4300(self): return self.__cputype(4300)
def _is_r4400(self): return self.__cputype(4400)
def _is_r4600(self): return self.__cputype(4600)
def _is_r4650(self): return self.__cputype(4650)
def _is_r5000(self): return self.__cputype(5000)
def _is_r6000(self): return self.__cputype(6000)
def _is_r8000(self): return self.__cputype(8000)
def _is_r10000(self): return self.__cputype(10000)
def _is_r12000(self): return self.__cputype(12000)
def _is_rorion(self): return self.__cputype('orion')
def get_ip(self):
try: return self.info.get('MACHINE')
except: pass
def __machine(self, n):
return self.info.get('MACHINE').lower() == 'ip%s' % (n)
def _is_IP19(self): return self.__machine(19)
def _is_IP20(self): return self.__machine(20)
def _is_IP21(self): return self.__machine(21)
def _is_IP22(self): return self.__machine(22)
def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
def _is_IP24(self): return self.__machine(24)
def _is_IP25(self): return self.__machine(25)
def _is_IP26(self): return self.__machine(26)
def _is_IP27(self): return self.__machine(27)
def _is_IP28(self): return self.__machine(28)
def _is_IP30(self): return self.__machine(30)
def _is_IP32(self): return self.__machine(32)
def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
class DarwinCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
machine='machine')
info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
self.__class__.info = info
def _not_impl(self): pass
def _getNCPUs(self):
return int(self.info['sysctl_hw'].get('hw.ncpu', 1))
def _is_Power_Macintosh(self):
return self.info['sysctl_hw']['hw.machine']=='Power Macintosh'
def _is_i386(self):
return self.info['arch']=='i386'
def _is_ppc(self):
return self.info['arch']=='ppc'
def __machine(self, n):
return self.info['machine'] == 'ppc%s'%n
def _is_ppc601(self): return self.__machine(601)
def _is_ppc602(self): return self.__machine(602)
def _is_ppc603(self): return self.__machine(603)
def _is_ppc603e(self): return self.__machine('603e')
def _is_ppc604(self): return self.__machine(604)
def _is_ppc604e(self): return self.__machine('604e')
def _is_ppc620(self): return self.__machine(620)
def _is_ppc630(self): return self.__machine(630)
def _is_ppc740(self): return self.__machine(740)
def _is_ppc7400(self): return self.__machine(7400)
def _is_ppc7450(self): return self.__machine(7450)
def _is_ppc750(self): return self.__machine(750)
def _is_ppc403(self): return self.__machine(403)
def _is_ppc505(self): return self.__machine(505)
def _is_ppc801(self): return self.__machine(801)
def _is_ppc821(self): return self.__machine(821)
def _is_ppc823(self): return self.__machine(823)
def _is_ppc860(self): return self.__machine(860)
class SunOSCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
mach='mach',
uname_i='uname_i',
isainfo_b='isainfo -b',
isainfo_n='isainfo -n',
)
info['uname_X'] = key_value_from_command('uname -X', sep='=')
for line in command_by_line('psrinfo -v 0'):
m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
if m:
info['processor'] = m.group('p')
break
self.__class__.info = info
def _not_impl(self): pass
def _is_i386(self):
return self.info['isainfo_n']=='i386'
def _is_sparc(self):
return self.info['isainfo_n']=='sparc'
def _is_sparcv9(self):
return self.info['isainfo_n']=='sparcv9'
def _getNCPUs(self):
return int(self.info['uname_X'].get('NumCPU', 1))
def _is_sun4(self):
return self.info['arch']=='sun4'
def _is_SUNW(self):
return re.match(r'SUNW', self.info['uname_i']) is not None
def _is_sparcstation5(self):
return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
def _is_ultra1(self):
return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
def _is_ultra250(self):
return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
def _is_ultra2(self):
return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
def _is_ultra30(self):
return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
def _is_ultra4(self):
return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
def _is_ultra5_10(self):
return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
def _is_ultra5(self):
return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
def _is_ultra60(self):
return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
def _is_ultra80(self):
return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
def _is_ultraenterprice(self):
return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
def _is_ultraenterprice10k(self):
return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
def _is_sunfire(self):
return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
def _is_ultra(self):
return re.match(r'.*Ultra', self.info['uname_i']) is not None
def _is_cpusparcv7(self):
return self.info['processor']=='sparcv7'
def _is_cpusparcv8(self):
return self.info['processor']=='sparcv8'
def _is_cpusparcv9(self):
return self.info['processor']=='sparcv9'
class Win32CPUInfo(CPUInfoBase):
info = None
pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
# XXX: what does the value of
# HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
# mean?
def __init__(self):
if self.info is not None:
return
info = []
try:
#XXX: Bad style to use so long `try:...except:...`. Fix it!
if sys.version_info[0] >= 3:
import winreg
else:
import _winreg as winreg
prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"\
"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
chnd=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey)
pnum=0
while True:
try:
proc=winreg.EnumKey(chnd, pnum)
except winreg.error:
break
else:
pnum+=1
info.append({"Processor":proc})
phnd=winreg.OpenKey(chnd, proc)
pidx=0
while True:
try:
name, value, vtpe=winreg.EnumValue(phnd, pidx)
except winreg.error:
break
else:
pidx=pidx+1
info[-1][name]=value
if name=="Identifier":
srch=prgx.search(value)
if srch:
info[-1]["Family"]=int(srch.group("FML"))
info[-1]["Model"]=int(srch.group("MDL"))
info[-1]["Stepping"]=int(srch.group("STP"))
except:
print(sys.exc_info()[1], '(ignoring)')
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['VendorIdentifier']=='AuthenticAMD'
def _is_Am486(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_Am5x86(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_AMDK5(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [0, 1, 2, 3]
def _is_AMDK6(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [6, 7]
def _is_AMDK6_2(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==8
def _is_AMDK6_3(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==9
def _is_AMDK7(self):
return self.is_AMD() and self.info[0]['Family'] == 6
# To reliably distinguish between the different types of AMD64 chips
# (Athlon64, Opteron, Athlon64 X2, Sempron, Turion 64, etc.) would
# require looking at the 'brand' from cpuid
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['Family'] == 15
# Intel
def _is_Intel(self):
return self.info[0]['VendorIdentifier']=='GenuineIntel'
def _is_i386(self):
return self.info[0]['Family']==3
def _is_i486(self):
return self.info[0]['Family']==4
def _is_i586(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_i686(self):
return self.is_Intel() and self.info[0]['Family']==6
def _is_Pentium(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_PentiumMMX(self):
return self.is_Intel() and self.info[0]['Family']==5 \
and self.info[0]['Model']==4
def _is_PentiumPro(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model']==1
def _is_PentiumII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [3, 5, 6]
def _is_PentiumIII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [7, 8, 9, 10, 11]
def _is_PentiumIV(self):
return self.is_Intel() and self.info[0]['Family']==15
def _is_PentiumM(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [9, 13, 14]
def _is_Core2(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [15, 16, 17]
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_mmx(self):
if self.is_Intel():
return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
or (self.info[0]['Family'] in [6, 15])
elif self.is_AMD():
return self.info[0]['Family'] in [5, 6, 15]
else:
return False
def _has_sse(self):
if self.is_Intel():
return (self.info[0]['Family']==6 and \
self.info[0]['Model'] in [7, 8, 9, 10, 11]) \
or self.info[0]['Family']==15
elif self.is_AMD():
return (self.info[0]['Family']==6 and \
self.info[0]['Model'] in [6, 7, 8, 10]) \
or self.info[0]['Family']==15
else:
return False
def _has_sse2(self):
if self.is_Intel():
return self.is_PentiumIV() or self.is_PentiumM() \
or self.is_Core2()
elif self.is_AMD():
return self.is_AMD64()
else:
return False
def _has_3dnow(self):
return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]
def _has_3dnowext(self):
return self.is_AMD() and self.info[0]['Family'] in [6, 15]
if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
cpuinfo = LinuxCPUInfo
elif sys.platform.startswith('irix'):
cpuinfo = IRIXCPUInfo
elif sys.platform == 'darwin':
cpuinfo = DarwinCPUInfo
elif sys.platform.startswith('sunos'):
cpuinfo = SunOSCPUInfo
elif sys.platform.startswith('win32'):
cpuinfo = Win32CPUInfo
elif sys.platform.startswith('cygwin'):
cpuinfo = LinuxCPUInfo
#XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
else:
cpuinfo = CPUInfoBase
cpu = cpuinfo()
#if __name__ == "__main__":
#
# cpu.is_blaa()
# cpu.is_Intel()
# cpu.is_Alpha()
#
# print 'CPU information:',
# for name in dir(cpuinfo):
# if name[0]=='_' and name[1]!='_':
# r = getattr(cpu,name[1:])()
# if r:
# if r!=1:
# print '%s=%s' %(name[1:],r),
# else:
# print name[1:],
# print
|
gpl-3.0
|
xuyuhan/depot_tools
|
third_party/pylint/config.py
|
67
|
5782
|
# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""utilities for Pylint configuration :
* pylintrc
* pylint.d (PYLINTHOME)
"""
from __future__ import with_statement
from __future__ import print_function
import pickle
import os
import sys
from os.path import exists, isfile, join, expanduser, abspath, dirname
# pylint home is used to save old runs results ################################
USER_HOME = expanduser('~')
if 'PYLINTHOME' in os.environ:
PYLINT_HOME = os.environ['PYLINTHOME']
if USER_HOME == '~':
USER_HOME = dirname(PYLINT_HOME)
elif USER_HOME == '~':
PYLINT_HOME = ".pylint.d"
else:
PYLINT_HOME = join(USER_HOME, '.pylint.d')
def get_pdata_path(base_name, recurs):
"""return the path of the file which should contain old search data for the
given base_name with the given options values
"""
base_name = base_name.replace(os.sep, '_')
return join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
def load_results(base):
"""try to unpickle and return data from file if it exists and is not
corrupted
return an empty dictionary if it doesn't exists
"""
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream)
except Exception: # pylint: disable=broad-except
return {}
if sys.version_info < (3, 0):
_PICK_DUMP, _PICK_LOAD = 'w', 'r'
else:
_PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
def save_results(results, base):
"""pickle results"""
if not exists(PYLINT_HOME):
try:
os.mkdir(PYLINT_HOME)
except OSError:
print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream)
except (IOError, OSError) as ex:
print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
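# Round-trip sketch ('mymodule' and the stats dict are placeholders; files land
# under PYLINT_HOME as e.g. 'mymodule1.stats'):
#
# save_results({'statement': 100, 'error': 2}, 'mymodule')
# load_results('mymodule')   # -> {'statement': 100, 'error': 2}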
# location of the configuration file ##########################################
def find_pylintrc():
"""search the pylint rc file and return its path if it find it, else None
"""
# is there a pylint rc file in the current directory ?
if exists('pylintrc'):
return abspath('pylintrc')
if isfile('__init__.py'):
curdir = abspath(os.getcwd())
while isfile(join(curdir, '__init__.py')):
curdir = abspath(join(curdir, '..'))
if isfile(join(curdir, 'pylintrc')):
return join(curdir, 'pylintrc')
if 'PYLINTRC' in os.environ and exists(os.environ['PYLINTRC']):
pylintrc = os.environ['PYLINTRC']
else:
user_home = expanduser('~')
if user_home == '~' or user_home == '/root':
pylintrc = ".pylintrc"
else:
pylintrc = join(user_home, '.pylintrc')
if not isfile(pylintrc):
pylintrc = join(user_home, '.config', 'pylintrc')
if not isfile(pylintrc):
if isfile('/etc/pylintrc'):
pylintrc = '/etc/pylintrc'
else:
pylintrc = None
return pylintrc
PYLINTRC = find_pylintrc()
ENV_HELP = '''
The following environment variables are used:
* PYLINTHOME
Path to the directory where persistent data for the run will be stored. If
not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
directory).
* PYLINTRC
Path to the configuration file. See the documentation for the method used
to search for configuration file.
''' % globals()
# evaluation messages #########################################################
def get_note_message(note):
"""return a message according to note
note is a float < 10 (10 is the highest note)
"""
assert note <= 10, "Note is %.2f. Either you cheated, or pylint's \
broken!" % note
if note < 0:
msg = 'You have to do something quick !'
elif note < 1:
msg = 'Hey! This is really dreadful. Or maybe pylint is buggy?'
elif note < 2:
msg = "Come on! You can't be proud of this code"
elif note < 3:
msg = 'Hum... Needs work.'
elif note < 4:
msg = 'Wouldn\'t you be a bit lazy?'
elif note < 5:
msg = 'A little more work would make it acceptable.'
elif note < 6:
msg = 'Just the bare minimum. Give it a bit more polish. '
elif note < 7:
msg = 'This is okay-ish, but I\'m sure you can do better.'
elif note < 8:
msg = 'If you commit now, people should not be making nasty \
comments about you on c.l.py'
elif note < 9:
msg = 'That\'s pretty good. Good work mate.'
elif note < 10:
msg = 'So close to being perfect...'
else:
msg = 'Wow ! Now this deserves our uttermost respect.\nPlease send \
your code to [email protected]'
return msg
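# For example: get_note_message(9.5) -> 'So close to being perfect...'
# and get_note_message(-1.0) -> 'You have to do something quick !'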
|
bsd-3-clause
|
christophlsa/odoo
|
addons/hw_escpos/escpos/exceptions.py
|
170
|
2884
|
""" ESC/POS Exceptions classes """
import os
class Error(Exception):
""" Base class for ESC/POS errors """
def __init__(self, msg, status=None):
Exception.__init__(self)
self.msg = msg
self.resultcode = 1
if status is not None:
self.resultcode = status
def __str__(self):
return self.msg
# Result/Exit codes
# 0   = success
# 10  = No Barcode type defined
# 20  = Barcode size values are out of range
# 30  = Barcode text not supplied
# 40  = Image height is too large
# 50  = No string supplied to be printed
# 60  = Invalid pin to send Cash Drawer pulse
# 70  = Impossible to get status from the printer
# 80  = Part of the ticket was not printed
# 90  = Printer device not found
# 100 = Impossible to handle the device
class BarcodeTypeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 10
def __str__(self):
return "No Barcode type is defined"
class BarcodeSizeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 20
def __str__(self):
return "Barcode size is out of range"
class BarcodeCodeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 30
def __str__(self):
return "Code was not supplied"
class ImageSizeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 40
def __str__(self):
return "Image height is longer than 255px and can't be printed"
class TextError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 50
def __str__(self):
return "Text string must be supplied to the text() method"
class CashDrawerError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 60
def __str__(self):
return "Valid pin must be set to send pulse"
class NoStatusError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 70
def __str__(self):
return "Impossible to get status from the printer"
class TicketNotPrinted(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 80
def __str__(self):
return "A part of the ticket was not been printed"
class NoDeviceError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 90
def __str__(self):
return "Impossible to find the printer Device"
class HandleDeviceError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 100
def __str__(self):
return "Impossible to handle device"
|
agpl-3.0
|
cwu2011/scikit-learn
|
examples/linear_model/plot_lasso_coordinate_descent_path.py
|
254
|
2639
|
"""
=====================
Lasso and Elastic Net
=====================
Lasso and elastic net (L1 and L2 penalisation) implemented using
coordinate descent.
The coefficients can be forced to be positive.
"""
print(__doc__)
# Author: Alexandre Gramfort <[email protected]>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import lasso_path, enet_path
from sklearn import datasets
diabetes = datasets.load_diabetes()
X = diabetes.data
y = diabetes.target
X /= X.std(axis=0) # Standardize data (easier to set the l1_ratio parameter)
# Compute paths
eps = 5e-3 # the smaller it is the longer is the path
print("Computing regularization path using the lasso...")
alphas_lasso, coefs_lasso, _ = lasso_path(X, y, eps, fit_intercept=False)
print("Computing regularization path using the positive lasso...")
alphas_positive_lasso, coefs_positive_lasso, _ = lasso_path(
X, y, eps, positive=True, fit_intercept=False)
print("Computing regularization path using the elastic net...")
alphas_enet, coefs_enet, _ = enet_path(
X, y, eps=eps, l1_ratio=0.8, fit_intercept=False)
print("Computing regularization path using the positve elastic net...")
alphas_positive_enet, coefs_positive_enet, _ = enet_path(
X, y, eps=eps, l1_ratio=0.8, positive=True, fit_intercept=False)
# Display results
plt.figure(1)
ax = plt.gca()
ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
l1 = plt.plot(-np.log10(alphas_lasso), coefs_lasso.T)
l2 = plt.plot(-np.log10(alphas_enet), coefs_enet.T, linestyle='--')
plt.xlabel('-Log(alpha)')
plt.ylabel('coefficients')
plt.title('Lasso and Elastic-Net Paths')
plt.legend((l1[-1], l2[-1]), ('Lasso', 'Elastic-Net'), loc='lower left')
plt.axis('tight')
plt.figure(2)
ax = plt.gca()
ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
l1 = plt.plot(-np.log10(alphas_lasso), coefs_lasso.T)
l2 = plt.plot(-np.log10(alphas_positive_lasso), coefs_positive_lasso.T,
linestyle='--')
plt.xlabel('-Log(alpha)')
plt.ylabel('coefficients')
plt.title('Lasso and positive Lasso')
plt.legend((l1[-1], l2[-1]), ('Lasso', 'positive Lasso'), loc='lower left')
plt.axis('tight')
plt.figure(3)
ax = plt.gca()
ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
l1 = plt.plot(-np.log10(alphas_enet), coefs_enet.T)
l2 = plt.plot(-np.log10(alphas_positive_enet), coefs_positive_enet.T,
linestyle='--')
plt.xlabel('-Log(alpha)')
plt.ylabel('coefficients')
plt.title('Elastic-Net and positive Elastic-Net')
plt.legend((l1[-1], l2[-1]), ('Elastic-Net', 'positive Elastic-Net'),
loc='lower left')
plt.axis('tight')
plt.show()
|
bsd-3-clause
|
PlanTool/plantool
|
GUI/plantool/ipp.py
|
3
|
2579
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.8
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_ipp', [dirname(__file__)])
except ImportError:
import _ipp
return _ipp
if fp is not None:
try:
_mod = imp.load_module('_ipp', fp, pathname, description)
finally:
fp.close()
return _mod
_ipp = swig_import_helper()
del swig_import_helper
else:
import _ipp
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
def run(argc):
return _ipp.run(argc)
run = _ipp.run
# This file is compatible with both classic and new-style classes.
|
gpl-2.0
|
beiko-lab/gengis
|
bin/Lib/traceback.py
|
53
|
11574
|
"""Extract, format and print information about Python stack traces."""
import linecache
import sys
import types
__all__ = ['extract_stack', 'extract_tb', 'format_exception',
'format_exception_only', 'format_list', 'format_stack',
'format_tb', 'print_exc', 'format_exc', 'print_exception',
'print_last', 'print_stack', 'print_tb', 'tb_lineno']
def _print(file, str='', terminator='\n'):
file.write(str+terminator)
def print_list(extracted_list, file=None):
"""Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file."""
if file is None:
file = sys.stderr
for filename, lineno, name, line in extracted_list:
_print(file,
' File "%s", line %d, in %s' % (filename,lineno,name))
if line:
_print(file, ' %s' % line.strip())
def format_list(extracted_list):
"""Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
"""
list = []
for filename, lineno, name, line in extracted_list:
item = ' File "%s", line %d, in %s\n' % (filename,lineno,name)
if line:
item = item + ' %s\n' % line.strip()
list.append(item)
return list
def print_tb(tb, limit=None, file=None):
"""Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file'
is omitted or None, the output goes to sys.stderr; otherwise
'file' should be an open file or file-like object with a write()
method.
"""
if file is None:
file = sys.stderr
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
_print(file,
' File "%s", line %d, in %s' % (filename, lineno, name))
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line: _print(file, ' ' + line.strip())
tb = tb.tb_next
n = n+1
def format_tb(tb, limit = None):
"""A shorthand for 'format_list(extract_stack(f, limit))."""
return format_list(extract_tb(tb, limit))
def extract_tb(tb, limit = None):
"""Return list of up to limit pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a quadruple (filename, line
number, function name, text) representing the information that is
usually printed for a stack trace. The text is a string with
leading and trailing whitespace stripped; if the source is not
available it is None.
"""
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb.tb_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line: line = line.strip()
else: line = None
list.append((filename, lineno, name, line))
tb = tb.tb_next
n = n+1
return list
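# Illustrative only (not part of the stdlib module): each entry of the
# returned list is a (filename, lineno, name, text) quadruple, e.g.
#   ('example.py', 3, 'boom', 'raise ValueError("nope")')
# where text is the stripped source line, or None if it isn't available.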
def print_exception(etype, value, tb, limit=None, file=None):
"""Print exception up to 'limit' stack trace entries from 'tb' to 'file'.
This differs from print_tb() in the following ways: (1) if
traceback is not None, it prints a header "Traceback (most recent
call last):"; (2) it prints the exception type and value after the
stack trace; (3) if type is SyntaxError and value has the
appropriate format, it prints the line where the syntax error
occurred with a caret on the next line indicating the approximate
position of the error.
"""
if file is None:
file = sys.stderr
if tb:
_print(file, 'Traceback (most recent call last):')
print_tb(tb, limit, file)
lines = format_exception_only(etype, value)
for line in lines:
_print(file, line, '')
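# Hedged usage sketch (not from the module's own documentation):
#   try:
#       1 / 0
#   except ZeroDivisionError:
#       etype, value, tb = sys.exc_info()
#       print_exception(etype, value, tb)  # writes the familiar traceback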
def format_exception(etype, value, tb, limit = None):
"""Format a stack trace and the exception information.
The arguments have the same meaning as the corresponding arguments
to print_exception(). The return value is a list of strings, each
ending in a newline and some containing internal newlines. When
these lines are concatenated and printed, exactly the same text is
printed as does print_exception().
"""
if tb:
list = ['Traceback (most recent call last):\n']
list = list + format_tb(tb, limit)
else:
list = []
list = list + format_exception_only(etype, value)
return list
def format_exception_only(etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.last_type and sys.last_value. The return value is a list of
strings, each ending in a newline.
Normally, the list contains a single string; however, for
SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the list.
"""
# An instance should not have a meaningful value parameter, but
# sometimes does, particularly for string exceptions, such as
# >>> raise string1, string2 # deprecated
#
    # Clear these out first because issubclass(string1, SyntaxError)
# would throw another exception and mask the original problem.
if (isinstance(etype, BaseException) or
isinstance(etype, types.InstanceType) or
etype is None or type(etype) is str):
return [_format_final_exc_line(etype, value)]
stype = etype.__name__
if not issubclass(etype, SyntaxError):
return [_format_final_exc_line(stype, value)]
# It was a syntax error; show exactly where the problem was found.
lines = []
try:
msg, (filename, lineno, offset, badline) = value.args
except Exception:
pass
else:
filename = filename or "<string>"
lines.append(' File "%s", line %d\n' % (filename, lineno))
if badline is not None:
lines.append(' %s\n' % badline.strip())
if offset is not None:
caretspace = badline.rstrip('\n')[:offset].lstrip()
            # non-space whitespace (like tabs) must be kept for alignment
caretspace = ((c.isspace() and c or ' ') for c in caretspace)
# only three spaces to account for offset1 == pos 0
lines.append(' %s^\n' % ''.join(caretspace))
value = msg
lines.append(_format_final_exc_line(stype, value))
return lines
def _format_final_exc_line(etype, value):
"""Return a list of a single line -- normal case for format_exception_only"""
valuestr = _some_str(value)
if value is None or not valuestr:
line = "%s\n" % etype
else:
line = "%s: %s\n" % (etype, valuestr)
return line
def _some_str(value):
try:
return str(value)
except Exception:
pass
try:
value = unicode(value)
return value.encode("ascii", "backslashreplace")
except Exception:
pass
return '<unprintable %s object>' % type(value).__name__
def print_exc(limit=None, file=None):
"""Shorthand for 'print_exception(sys.exc_type, sys.exc_value, sys.exc_traceback, limit, file)'.
(In fact, it uses sys.exc_info() to retrieve the same information
in a thread-safe way.)"""
if file is None:
file = sys.stderr
try:
etype, value, tb = sys.exc_info()
print_exception(etype, value, tb, limit, file)
finally:
etype = value = tb = None
def format_exc(limit=None):
"""Like print_exc() but return a string."""
try:
etype, value, tb = sys.exc_info()
return ''.join(format_exception(etype, value, tb, limit))
finally:
etype = value = tb = None
def print_last(limit=None, file=None):
"""This is a shorthand for 'print_exception(sys.last_type,
sys.last_value, sys.last_traceback, limit, file)'."""
if not hasattr(sys, "last_type"):
raise ValueError("no last exception")
if file is None:
file = sys.stderr
print_exception(sys.last_type, sys.last_value, sys.last_traceback,
limit, file)
def print_stack(f=None, limit=None, file=None):
"""Print a stack trace from its invocation point.
The optional 'f' argument can be used to specify an alternate
stack frame at which to start. The optional 'limit' and 'file'
arguments have the same meaning as for print_exception().
"""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
print_list(extract_stack(f, limit), file)
def format_stack(f=None, limit=None):
"""Shorthand for 'format_list(extract_stack(f, limit))'."""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
return format_list(extract_stack(f, limit))
def extract_stack(f=None, limit = None):
"""Extract the raw traceback from the current stack frame.
The return value has the same format as for extract_tb(). The
optional 'f' and 'limit' arguments have the same meaning as for
print_stack(). Each item in the list is a quadruple (filename,
line number, function name, text), and the entries are in order
from oldest to newest stack frame.
"""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while f is not None and (limit is None or n < limit):
lineno = f.f_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line: line = line.strip()
else: line = None
list.append((filename, lineno, name, line))
f = f.f_back
n = n+1
list.reverse()
return list
def tb_lineno(tb):
"""Calculate correct line number of traceback given in tb.
Obsolete in 2.3.
"""
return tb.tb_lineno
|
gpl-3.0
|
lhl/pensieve
|
js/node_modules/prosemirror/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
|
1407
|
47697
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
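# Illustrative call (arguments invented): the command word stays unquoted so
# shell built-ins keep working, while later args go through QuoteForRspFile:
#   EncodeRspFileList(['cl.exe', '/Fo out dir', 'a.cc'])
#   -> 'cl.exe "/Fo out dir" "a.cc"'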
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
  the value at that path, or return |default| if any part of it doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
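# Tiny illustration (values invented, not from gyp):
#   _GenericRetrieve({'a': {'b': 1}}, 0, ('a', 'b'))  -> 1
#   _GenericRetrieve({'a': {'b': 1}}, 0, ('a', 'x'))  -> 0  (falls back to default)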
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env['$(VSInstallDir)'] = vs_version.Path()
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(
configs[0].get('msvs_system_include_dirs', []))
for config in configs[1:]:
system_includes = config.get('msvs_system_include_dirs', [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
for include in all_system_includes])
if any(['$' in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get('msvs_system_include_dirs', [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config['msvs_system_include_dirs'] = new_includes
return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
target_ext = '.' + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
'$(InputName)': '${root}',
'$(InputPath)': '${source}',
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
'$(ProjectName)': self.spec['target_name'],
'$(TargetDir)\\': target_dir,
'$(TargetExt)': target_ext,
'$(TargetFileName)': target_file_name,
'$(TargetName)': target_name,
'$(TargetPath)': os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
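  # Hedged illustration (inputs invented): libraries are normalized so the
  # linker always sees .lib names, e.g.
  #   self.AdjustLibraries(['-lfoo', 'bar.lib', 'baz'])
  #   -> ['foo.lib', 'bar.lib', 'baz.lib']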
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
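  # Sketch of the retrieve/remap/prefix pipeline above (values invented,
  # not taken from a real gyp file):
  #   field = {'VCCLCompilerTool': {'WarningLevel': '3'}}
  #   self._GetAndMunge(field, ('VCCLCompilerTool', 'WarningLevel'),
  #                     default=None, prefix='/W', append=None, map=None)
  #   -> '/W3'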
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
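  # Hedged examples of the remapping described above (config names invented):
  #   arch 'x64' with config 'Debug'      -> 'Debug_x64'
  #   arch 'x86' with config 'Debug_x64'  -> 'Debug'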
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('FloatingPointModel',
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
default='0')
cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
stack_reserve_size = self._Setting(
('VCLinkerTool', 'StackReserveSize'), config, default='')
if stack_reserve_size:
stack_commit_size = self._Setting(
('VCLinkerTool', 'StackCommitSize'), config, default='')
if stack_commit_size:
stack_commit_size = ',' + stack_commit_size
ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
if self.GetArch(config) == 'x86':
safeseh_default = 'true'
else:
safeseh_default = None
ld('ImageHasSafeExceptionHandlers',
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
default=safeseh_default)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
    unset, or set to 1, we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'),
allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
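# Illustrative call (macro value invented, not from a real build):
#   ExpandMacros('$(OutDir)\\app.exe', {'$(OutDir)\\': 'out\\Release\\'})
#   -> 'out\\Release\\app.exe'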
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
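# Sketch with an invented 'set' dump (not real output): names matching
# envvars_to_save are kept with upper-cased keys, PATH additionally gets the
# running python's directory prepended, and everything else is dropped:
#   'Path=C:\\bin\nSystemRoot=C:\\Windows\nTEMP=C:\\t\nTMP=C:\\t\nFOO=bar'
#   -> {'PATH': '<python dir>;C:\\bin', 'SYSTEMROOT': 'C:\\Windows',
#       'TEMP': 'C:\\t', 'TMP': 'C:\\t'}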
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
  Briefly, this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
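# Hedged shape illustration (key invented; iteration order is dict order):
#   _FormatAsEnvironmentBlock({'TMP': 'C:\\t'})  ->  'TMP=C:\\t\x00\x00'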
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
When the following procedure to generate environment files does not
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get('INCLUDE', '').split(';'))
env['INCLUDE'] = ';'.join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running through WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
|
gpl-3.0
|
craynot/django
|
tests/apps/tests.py
|
54
|
16296
|
from __future__ import unicode_literals
import os
import warnings
from unittest import skipUnless
from django.apps import AppConfig, apps
from django.apps.registry import Apps
from django.contrib.admin.models import LogEntry
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import extend_sys_path
from django.utils import six
from django.utils._os import upath
from .default_config_app.apps import CustomConfig
from .models import SoAlternative, TotallyNormal, new_apps
# Small list with a variety of cases for tests that iterate on installed apps.
# Intentionally not in alphabetical order to check if the order is preserved.
SOME_INSTALLED_APPS = [
'apps.apps.MyAdmin',
'apps.apps.MyAuth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
SOME_INSTALLED_APPS_NAMES = [
'django.contrib.admin',
'django.contrib.auth',
] + SOME_INSTALLED_APPS[2:]
HERE = os.path.dirname(upath(__file__))
class AppsTests(SimpleTestCase):
def test_singleton_master(self):
"""
Ensures that only one master registry can exist.
"""
with self.assertRaises(RuntimeError):
Apps(installed_apps=None)
def test_ready(self):
"""
Tests the ready property of the master registry.
"""
# The master app registry is always ready when the tests run.
self.assertTrue(apps.ready)
# Non-master app registries are populated in __init__.
self.assertTrue(Apps().ready)
def test_bad_app_config(self):
"""
Tests when INSTALLED_APPS contains an incorrect app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.BadConfig']):
pass
def test_not_an_app_config(self):
"""
Tests when INSTALLED_APPS contains a class that isn't an app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.NotAConfig']):
pass
def test_no_such_app(self):
"""
Tests when INSTALLED_APPS contains an app that doesn't exist, either
directly or via an app config.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['there is no such app']):
pass
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchApp']):
pass
def test_no_such_app_config(self):
"""
Tests when INSTALLED_APPS contains an entry that doesn't exist.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchConfig']):
pass
def test_default_app_config(self):
with self.settings(INSTALLED_APPS=['apps.default_config_app']):
config = apps.get_app_config('default_config_app')
self.assertIsInstance(config, CustomConfig)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_configs(self):
"""
Tests apps.get_app_configs().
"""
app_configs = apps.get_app_configs()
self.assertListEqual(
[app_config.name for app_config in app_configs],
SOME_INSTALLED_APPS_NAMES)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_config(self):
"""
Tests apps.get_app_config().
"""
app_config = apps.get_app_config('admin')
self.assertEqual(app_config.name, 'django.contrib.admin')
app_config = apps.get_app_config('staticfiles')
self.assertEqual(app_config.name, 'django.contrib.staticfiles')
with self.assertRaises(LookupError):
apps.get_app_config('admindocs')
msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'"
with self.assertRaisesMessage(LookupError, msg):
apps.get_app_config('django.contrib.auth')
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_is_installed(self):
"""
Tests apps.is_installed().
"""
self.assertTrue(apps.is_installed('django.contrib.admin'))
self.assertTrue(apps.is_installed('django.contrib.auth'))
self.assertTrue(apps.is_installed('django.contrib.staticfiles'))
self.assertFalse(apps.is_installed('django.contrib.admindocs'))
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_model(self):
"""
Tests apps.get_model().
"""
self.assertEqual(apps.get_model('admin', 'LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin', 'LogExit')
# App label is case-sensitive, Model name is case-insensitive.
self.assertEqual(apps.get_model('admin', 'loGentrY'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('Admin', 'LogEntry')
# A single argument is accepted.
self.assertEqual(apps.get_model('admin.LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin.LogExit')
with self.assertRaises(ValueError):
apps.get_model('admin_LogEntry')
@override_settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig'])
def test_relabeling(self):
self.assertEqual(apps.get_app_config('relabeled').name, 'apps')
def test_duplicate_labels(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application labels aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.PlainAppsConfig', 'apps']):
pass
def test_duplicate_names(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application names aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig', 'apps']):
pass
def test_import_exception_is_not_masked(self):
"""
App discovery should preserve stack traces. Regression test for #22920.
"""
with six.assertRaisesRegex(self, ImportError, "Oops"):
with self.settings(INSTALLED_APPS=['import_error_package']):
pass
def test_models_py(self):
"""
Tests that the models in the models.py file were loaded correctly.
"""
self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal)
with self.assertRaises(LookupError):
apps.get_model("apps", "SoAlternative")
with self.assertRaises(LookupError):
new_apps.get_model("apps", "TotallyNormal")
self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative)
def test_dynamic_load(self):
"""
Makes a new model at runtime and ensures it goes into the right place.
"""
old_models = list(apps.get_app_config("apps").get_models())
# Construct a new model in a new app registry
body = {}
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
meta = type(str("Meta"), tuple(), meta_contents)
body['Meta'] = meta
body['__module__'] = TotallyNormal.__module__
temp_model = type(str("SouthPonies"), (models.Model,), body)
# Make sure it appeared in the right place!
self.assertListEqual(list(apps.get_app_config("apps").get_models()), old_models)
with self.assertRaises(LookupError):
apps.get_model("apps", "SouthPonies")
self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model)
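    # The pattern above -- a Meta class built with type(), a body dict that
    # carries Meta and __module__, and a final type(name, (models.Model,),
    # body) call -- is the minimal recipe for creating a model against a
    # chosen Apps registry at runtime.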
def test_model_clash(self):
"""
Test for behavior when two models clash in the app registry.
"""
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
type(str("SouthPonies"), (models.Model,), body)
# When __name__ and __module__ match we assume the module
            # was reloaded and issue a warning. This use case is
            # useful in a REPL. Refs #23621.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
with warnings.catch_warnings(record=True) as w:
type(str("SouthPonies"), (models.Model,), body)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(str(w[-1].message),
"Model 'apps.southponies' was already registered. "
"Reloading models is not advised as it can lead to inconsistencies, "
"most notably with related models.")
# If it doesn't appear to be a reloaded module then we expect
# a RuntimeError.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__ + '.whatever'
with six.assertRaisesRegex(self, RuntimeError,
"Conflicting 'southponies' models in application 'apps':.*"):
type(str("SouthPonies"), (models.Model,), body)
def test_get_containing_app_config_apps_not_ready(self):
"""
apps.get_containing_app_config() should raise an exception if
apps.apps_ready isn't True.
"""
apps.apps_ready = False
try:
with self.assertRaisesMessage(AppRegistryNotReady, "Apps aren't loaded yet"):
apps.get_containing_app_config('foo')
finally:
apps.apps_ready = True
def test_lazy_model_operation(self):
"""
Tests apps.lazy_model_operation().
"""
model_classes = []
initial_pending = set(apps._pending_operations)
def test_func(*models):
model_classes[:] = models
class LazyA(models.Model):
pass
# Test models appearing twice, and models appearing consecutively
model_keys = [('apps', model_name) for model_name in ['lazya', 'lazyb', 'lazyb', 'lazyc', 'lazya']]
apps.lazy_model_operation(test_func, *model_keys)
        # LazyA shouldn't be waited on since it's already registered,
        # and LazyC shouldn't be waited on until LazyB exists.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyb')})
# Test that multiple operations can wait on the same model
apps.lazy_model_operation(test_func, ('apps', 'lazyb'))
class LazyB(models.Model):
pass
self.assertListEqual(model_classes, [LazyB])
        # Now we are just waiting on LazyC.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyc')})
class LazyC(models.Model):
pass
# Everything should be loaded - make sure the callback was executed properly.
self.assertListEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA])
class Stub(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class AppConfigTests(SimpleTestCase):
"""Unit tests for AppConfig class."""
def test_path_set_explicitly(self):
"""If subclass sets path as class attr, no module attributes needed."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub())
self.assertEqual(ac.path, 'foo')
def test_explicit_path_overrides(self):
"""If path set as class attr, overrides __path__ and __file__."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'foo')
def test_dunder_path(self):
"""If single element in __path__, use it (in preference to __file__)."""
ac = AppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'a')
def test_no_dunder_path_fallback_to_dunder_file(self):
"""If there is no __path__ attr, use __file__."""
ac = AppConfig('label', Stub(__file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_empty_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is empty, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=[], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_multiple_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is length>1, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=['a', 'b'], __file__='c/__init__.py'))
self.assertEqual(ac.path, 'c')
def test_no_dunder_path_or_dunder_file(self):
"""If there is no __path__ or __file__, raise ImproperlyConfigured."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub())
def test_empty_dunder_path_no_dunder_file(self):
"""If the __path__ attr is empty and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=[]))
def test_multiple_dunder_path_no_dunder_file(self):
"""If the __path__ attr is length>1 and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=['a', 'b']))
def test_duplicate_dunder_path_no_dunder_file(self):
"""
If the __path__ attr contains duplicate paths and there is no
        __file__, the duplicates should be deduplicated (#25246).
"""
ac = AppConfig('label', Stub(__path__=['a', 'a']))
self.assertEqual(ac.path, 'a')
@skipUnless(six.PY3, "Namespace packages sans __init__.py were added in Python 3.3")
class NamespacePackageAppTests(SimpleTestCase):
# We need nsapp to be top-level so our multiple-paths tests can add another
    # location for it (if it's inside a normal package with an __init__.py that
# isn't possible). In order to avoid cluttering the already-full tests/ dir
# (which is on sys.path), we add these new entries to sys.path temporarily.
base_location = os.path.join(HERE, 'namespace_package_base')
other_location = os.path.join(HERE, 'namespace_package_other_base')
app_path = os.path.join(base_location, 'nsapp')
def test_single_path(self):
"""
A Py3.3+ namespace package can be an app if it has only one path.
"""
with extend_sys_path(self.base_location):
with self.settings(INSTALLED_APPS=['nsapp']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
def test_multiple_paths(self):
"""
A Py3.3+ namespace package with multiple locations cannot be an app.
(Because then we wouldn't know where to load its templates, static
assets, etc from.)
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['nsapp']):
pass
def test_multiple_paths_explicit_path(self):
"""
Multiple locations are ok only if app-config has explicit path.
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.settings(INSTALLED_APPS=['nsapp.apps.NSAppConfig']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
|
bsd-3-clause
|
dga4654dan/UTM-Demo
|
V_1_0_2_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/xml/dom/pulldom.py
|
8
|
11891
|
import xml.sax
import xml.sax.handler
import types
try:
_StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
_StringTypes = [types.StringType]
START_ELEMENT = "START_ELEMENT"
END_ELEMENT = "END_ELEMENT"
COMMENT = "COMMENT"
START_DOCUMENT = "START_DOCUMENT"
END_DOCUMENT = "END_DOCUMENT"
PROCESSING_INSTRUCTION = "PROCESSING_INSTRUCTION"
IGNORABLE_WHITESPACE = "IGNORABLE_WHITESPACE"
CHARACTERS = "CHARACTERS"
class PullDOM(xml.sax.ContentHandler):
_locator = None
document = None
def __init__(self, documentFactory=None):
from xml.dom import XML_NAMESPACE
self.documentFactory = documentFactory
self.firstEvent = [None, None]
self.lastEvent = self.firstEvent
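        # firstEvent/lastEvent implement a singly linked queue of pending
        # events: each cell is [event, next_cell]; lastEvent always points
        # at the tail cell whose next slot is still None.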
self.elementStack = []
self.push = self.elementStack.append
try:
self.pop = self.elementStack.pop
except AttributeError:
# use class' pop instead
pass
self._ns_contexts = [{XML_NAMESPACE:'xml'}] # contains uri -> prefix dicts
self._current_context = self._ns_contexts[-1]
self.pending_events = []
def pop(self):
result = self.elementStack[-1]
del self.elementStack[-1]
return result
def setDocumentLocator(self, locator):
self._locator = locator
def startPrefixMapping(self, prefix, uri):
if not hasattr(self, '_xmlns_attrs'):
self._xmlns_attrs = []
self._xmlns_attrs.append((prefix or 'xmlns', uri))
self._ns_contexts.append(self._current_context.copy())
self._current_context[uri] = prefix or None
def endPrefixMapping(self, prefix):
self._current_context = self._ns_contexts.pop()
    def startElementNS(self, name, tagName, attrs):
uri, localname = name
if uri:
# When using namespaces, the reader may or may not
# provide us with the original name. If not, create
# *a* valid tagName from the current context.
if tagName is None:
prefix = self._current_context[uri]
if prefix:
tagName = prefix + ":" + localname
else:
tagName = localname
if self.document:
node = self.document.createElementNS(uri, tagName)
else:
node = self.buildDocument(uri, tagName)
else:
# When the tagname is not prefixed, it just appears as
# localname
if self.document:
node = self.document.createElement(localname)
else:
node = self.buildDocument(None, localname)
# Retrieve xml namespace declaration attributes.
xmlns_uri = 'http://www.w3.org/2000/xmlns/'
xmlns_attrs = getattr(self, '_xmlns_attrs', None)
if xmlns_attrs is not None:
for aname, value in xmlns_attrs:
if aname == 'xmlns':
qname = aname
else:
qname = 'xmlns:' + aname
attr = self.document.createAttributeNS(xmlns_uri, qname)
attr.value = value
node.setAttributeNodeNS(attr)
self._xmlns_attrs = []
for aname,value in attrs.items():
a_uri, a_localname = aname
if a_uri:
prefix = self._current_context[a_uri]
if prefix:
qname = prefix + ":" + a_localname
else:
qname = a_localname
attr = self.document.createAttributeNS(a_uri, qname)
node.setAttributeNodeNS(attr)
else:
attr = self.document.createAttribute(a_localname)
node.setAttributeNode(attr)
attr.value = value
self.lastEvent[1] = [(START_ELEMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
def endElementNS(self, name, tagName):
self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
self.lastEvent = self.lastEvent[1]
def startElement(self, name, attrs):
if self.document:
node = self.document.createElement(name)
else:
node = self.buildDocument(None, name)
for aname,value in attrs.items():
attr = self.document.createAttribute(aname)
attr.value = value
node.setAttributeNode(attr)
self.lastEvent[1] = [(START_ELEMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
def endElement(self, name):
self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
self.lastEvent = self.lastEvent[1]
def comment(self, s):
if self.document:
node = self.document.createComment(s)
self.lastEvent[1] = [(COMMENT, node), None]
self.lastEvent = self.lastEvent[1]
else:
event = [(COMMENT, s), None]
self.pending_events.append(event)
def processingInstruction(self, target, data):
if self.document:
node = self.document.createProcessingInstruction(target, data)
self.lastEvent[1] = [(PROCESSING_INSTRUCTION, node), None]
self.lastEvent = self.lastEvent[1]
else:
event = [(PROCESSING_INSTRUCTION, target, data), None]
self.pending_events.append(event)
def ignorableWhitespace(self, chars):
node = self.document.createTextNode(chars)
self.lastEvent[1] = [(IGNORABLE_WHITESPACE, node), None]
self.lastEvent = self.lastEvent[1]
def characters(self, chars):
node = self.document.createTextNode(chars)
self.lastEvent[1] = [(CHARACTERS, node), None]
self.lastEvent = self.lastEvent[1]
def startDocument(self):
if self.documentFactory is None:
import xml.dom.minidom
self.documentFactory = xml.dom.minidom.Document.implementation
def buildDocument(self, uri, tagname):
# Can't do that in startDocument, since we need the tagname
# XXX: obtain DocumentType
node = self.documentFactory.createDocument(uri, tagname, None)
self.document = node
self.lastEvent[1] = [(START_DOCUMENT, node), None]
self.lastEvent = self.lastEvent[1]
self.push(node)
# Put everything we have seen so far into the document
for e in self.pending_events:
if e[0][0] == PROCESSING_INSTRUCTION:
_,target,data = e[0]
n = self.document.createProcessingInstruction(target, data)
e[0] = (PROCESSING_INSTRUCTION, n)
elif e[0][0] == COMMENT:
n = self.document.createComment(e[0][1])
e[0] = (COMMENT, n)
else:
                raise AssertionError("Unknown pending event %r" % (e[0][0],))
self.lastEvent[1] = e
self.lastEvent = e
self.pending_events = None
return node.firstChild
def endDocument(self):
self.lastEvent[1] = [(END_DOCUMENT, self.document), None]
self.pop()
def clear(self):
"clear(): Explicitly release parsing structures"
self.document = None
class ErrorHandler:
def warning(self, exception):
print exception
def error(self, exception):
raise exception
def fatalError(self, exception):
raise exception
class DOMEventStream:
def __init__(self, stream, parser, bufsize):
self.stream = stream
self.parser = parser
self.bufsize = bufsize
if not hasattr(self.parser, 'feed'):
self.getEvent = self._slurp
self.reset()
def reset(self):
self.pulldom = PullDOM()
# This content handler relies on namespace support
self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
self.parser.setContentHandler(self.pulldom)
def __getitem__(self, pos):
rc = self.getEvent()
if rc:
return rc
raise IndexError
def next(self):
rc = self.getEvent()
if rc:
return rc
raise StopIteration
def __iter__(self):
return self
def expandNode(self, node):
event = self.getEvent()
parents = [node]
while event:
token, cur_node = event
if cur_node is node:
return
if token != END_ELEMENT:
parents[-1].appendChild(cur_node)
if token == START_ELEMENT:
parents.append(cur_node)
elif token == END_ELEMENT:
del parents[-1]
event = self.getEvent()
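    # Illustrative expandNode() behaviour: for events from
    # "<doc><a><b/></a></doc>", calling expandNode(a_node) just after the
    # START_ELEMENT for <a> consumes events up to </a> and leaves <b/>
    # attached as a child of a_node.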
def getEvent(self):
# use IncrementalParser interface, so we get the desired
# pull effect
if not self.pulldom.firstEvent[1]:
self.pulldom.lastEvent = self.pulldom.firstEvent
while not self.pulldom.firstEvent[1]:
buf = self.stream.read(self.bufsize)
if not buf:
self.parser.close()
return None
self.parser.feed(buf)
rc = self.pulldom.firstEvent[1][0]
self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
return rc
def _slurp(self):
""" Fallback replacement for getEvent() using the
standard SAX2 interface, which means we slurp the
SAX events into memory (no performance gain, but
        we are compatible with all SAX parsers).
"""
self.parser.parse(self.stream)
self.getEvent = self._emit
return self._emit()
def _emit(self):
""" Fallback replacement for getEvent() that emits
the events that _slurp() read previously.
"""
rc = self.pulldom.firstEvent[1][0]
self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
return rc
def clear(self):
"""clear(): Explicitly release parsing objects"""
self.pulldom.clear()
del self.pulldom
self.parser = None
self.stream = None
class SAX2DOM(PullDOM):
    def startElementNS(self, name, tagName, attrs):
PullDOM.startElementNS(self, name, tagName, attrs)
curNode = self.elementStack[-1]
parentNode = self.elementStack[-2]
parentNode.appendChild(curNode)
def startElement(self, name, attrs):
PullDOM.startElement(self, name, attrs)
curNode = self.elementStack[-1]
parentNode = self.elementStack[-2]
parentNode.appendChild(curNode)
def processingInstruction(self, target, data):
PullDOM.processingInstruction(self, target, data)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
def ignorableWhitespace(self, chars):
PullDOM.ignorableWhitespace(self, chars)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
def characters(self, chars):
PullDOM.characters(self, chars)
node = self.lastEvent[0][1]
parentNode = self.elementStack[-1]
parentNode.appendChild(node)
default_bufsize = (2 ** 14) - 20
def parse(stream_or_string, parser=None, bufsize=None):
if bufsize is None:
bufsize = default_bufsize
if type(stream_or_string) in _StringTypes:
stream = open(stream_or_string)
else:
stream = stream_or_string
if not parser:
parser = xml.sax.make_parser()
return DOMEventStream(stream, parser, bufsize)
def parseString(string, parser=None):
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
bufsize = len(string)
buf = StringIO(string)
if not parser:
parser = xml.sax.make_parser()
return DOMEventStream(buf, parser, bufsize)
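# Hedged usage sketch for the pull API defined above; the XML literal and
# variable names are invented for illustration.
if __name__ == "__main__":
    demo_events = parseString("<doc><a/><b/></doc>")
    for event, node in demo_events:
        if event == START_ELEMENT:
            print(node.tagName)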
|
gpl-2.0
|
avaitla/Haskell-to-C---Bridge
|
pygccxml-1.0.0/unittests/has_binary_operator_traits_tester.py
|
1
|
1826
|
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import unittest
import autoconfig
import parser_test_case
from pygccxml import utils
from pygccxml import parser
from pygccxml import declarations
class tester_t( parser_test_case.parser_test_case_t ):
COMPILATION_MODE = parser.COMPILATION_MODE.ALL_AT_ONCE
global_ns = None
def __init__(self, *args ):
parser_test_case.parser_test_case_t.__init__( self, *args )
self.header = 'has_public_binary_operator_traits.hpp'
self.global_ns = None
def setUp(self):
if not tester_t.global_ns:
decls = parser.parse( [self.header], self.config )
tester_t.global_ns = declarations.get_global_namespace( decls )
self.global_ns = tester_t.global_ns
def test_yes( self ):
yes_ns = self.global_ns.namespace( 'yes' )
for typedef in yes_ns.typedefs():
self.failUnless( declarations.has_public_equal( typedef )
, "Class '%s' should have public operator==" % typedef.decl_string )
def test_no( self ):
no_ns = self.global_ns.namespace( 'no' )
for typedef in no_ns.typedefs():
self.failUnless( not declarations.has_public_equal( typedef )
, "Class '%s' should not have public operator==" % typedef.decl_string )
def create_suite():
suite = unittest.TestSuite()
suite.addTest( unittest.makeSuite(tester_t))
return suite
def run_suite():
unittest.TextTestRunner(verbosity=2).run( create_suite() )
if __name__ == "__main__":
run_suite()
|
bsd-3-clause
|
karlnapf/shogun
|
examples/undocumented/python/transfer_multitask_clustered_logistic_regression.py
|
4
|
1634
|
#!/usr/bin/env python
from numpy import array,hstack,sin,cos
from numpy.random import seed, rand
from tools.load import LoadMatrix
lm=LoadMatrix()
traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
label_traindat = lm.load_labels('../data/label_train_twoclass.dat')
parameter_list = [[traindat,testdat,label_traindat]]
def transfer_multitask_clustered_logistic_regression (fm_train=traindat,fm_test=testdat,label_train=label_traindat):
from shogun import BinaryLabels, Task, TaskGroup, MSG_DEBUG
try:
from shogun import MultitaskClusteredLogisticRegression
except ImportError:
print("MultitaskClusteredLogisticRegression not available")
exit()
import shogun as sg
	features = sg.features(hstack((fm_train,sin(fm_train),cos(fm_train))))
labels = BinaryLabels(hstack((label_train,label_train,label_train)))
n_vectors = features.get_num_vectors()
task_one = Task(0,n_vectors//3)
task_two = Task(n_vectors//3,2*n_vectors//3)
task_three = Task(2*n_vectors//3,n_vectors)
task_group = TaskGroup()
task_group.append_task(task_one)
task_group.append_task(task_two)
task_group.append_task(task_three)
mtlr = MultitaskClusteredLogisticRegression(1.0,100.0,features,labels,task_group,2)
#mtlr.io.set_loglevel(MSG_DEBUG)
	mtlr.set_tolerance(1e-3) # use 1e-3 tolerance
mtlr.set_max_iter(100)
mtlr.train()
mtlr.set_current_task(0)
#print mtlr.get_w()
out = mtlr.apply_regression().get_labels()
return out
if __name__=='__main__':
print('TransferMultitaskClusteredLogisticRegression')
transfer_multitask_clustered_logistic_regression(*parameter_list[0])
|
bsd-3-clause
|
blkFinch/hutch-val-pair-project
|
AdventureGameMain.py
|
1
|
6122
|
'''
This is our main file
'''
import random
from Player import Player
def intro(): #intro to the GAME
print('Good morning')
input('What a weird dream you woke from, huh? PRESS ENTER')
input('Maybe once you open your eyes ... PRESS ENTER')
input('You will see something IS a little off ... PRESS ENTER')
input('Your bedroom is looking a little like ... PRESS ENTER')
print(' ___ __ ____ __ __')
print(' / / / / / ___/ / / / /')
print(' / /_/ / / /__ / / / /')
print(' / __ / / __/ / / / /')
print(' / / / / / /_ / /___ / /__')
print('/__/ /__/ /____/ /_____//_____/')
print('\n\n') #added for formatting
titleCard()
def titleCard():
print()
print('A D V E N T U R E G A M E') #title
print('\t\tver 1.4')#version number
print('\nDev. N. Hutchison \tR. Valerio')
print('\n\n')
def createCharacter():
name = input('Please enter your name: ')
    health = 10
    key = 0
    notes = 'Noob'
player = Player(name,health,key,notes)
return player
def testHealth(player):
print('NAME: ',player.getName())
print('HEALTH: ',player.getHealth())
print('NOTES: ',player.getNotes())
print('KEY: ',player.getKey())
print('Subtract 1 HP')
player.setHealth(player.getHealth()-1)
print('HEALTH: ',player.getHealth())
def bedroom(player):
print('bedroom here <DESCRIPTION>')#player is in bedroom has to climb out
print('1 = Option 1')
print('2 = Climb wall')
print('\n')
action = input('ACTION: ')
if action == '1':
print('You did a thing! maybe looked around')
bedroom(player)
elif action == '2':
print('You decided to climb the wall')
climbGame(player)
def climbGame(player):
print('<the climb game goes here>')
input('press ENTER to continue')
mainChamber(player)
def lose():
for count in range(4):
print('!!!!')
print('Y O U \n\t\tL O S E')
def win():
print('W')
print('I')
print('N')
print('!')
def mainChamber(player):
print('The main Chamber<DESCRIPTION>')
print('1 = See wizard')
print('2 = Exit gate')
print('3 = Maze')
print('4 = Jump in hole')
action = input('\nACTION: ')
if action =='1':
wizardRoom(player)
elif action == '2':
exitGate(player)
elif action == '3':
maze(player)
elif action == '4':
lose()
def wizardRoom(player):
keycheck = player.getKey()
if keycheck == 0:
print('You see a wizard<description>')
print('he asks you a pretty tough riddle: \n')
print('<RIDDLE>')
print('\n')
print('1 = Answer')
print('2 = Go back')
print()
action = input('ACTION: ')
if action == '1':
riddle = input('ANSWER: ')
if riddle == 'cat':
print('you did it!')
print('\nGOT KEY!')
player.setKey(player.getKey()+1)
wizardRoom(player)
else:
print('wrong!!!')
wizardRoom(player)
else:
print('YOU HAVE THE KEY')
print()
input('PRESS ENTER TO RETURN TO MAIN CHAMBER')
print()
mainChamber(player)
def exitGate(player):
print('The Gate<DESCRIPTION>')
print('1 = EXIT')
print('2 = Go back')
print()
action = input('ACTION: ')
if action == '1':
if player.getKey() >= 1:
win()
else:
print('The gate is locked')
exitGate(player)
elif action == '2':
mainChamber(player)
def maze(player): #visualization of never-ending maze
print("The entrance to the maze is imposing.")
choose = input("Keep going? Y or N? ")
if choose == "Y" or choose == "y":
print('Twists and turns, it is never-ending.')
artFiller = 30
for a in range(artFiller):
for c in range(a + 1):
print('!!', end='')
print()
print("You near the end, you hear something ...")
battle(player)
else:
lose()
def battle(player): #intro to battle sequence
input('"BWA HA HA HA HA! Little human -- you are DEAD." PRESS ENTER')
input("From the depths of the shadows a dark figure emerges. PRESS ENTER")
input("Fear builds as BAD GUY approaches. PRESS ENTER")
print()
hits(player)
def hits(player): #battle sequence between player and random enemy
you = player.getHealth()
badGuy = random.randint(7, 9)
strike = random.randint(1, 2)
pick = input("What do you do? Press 1 to ATTACK. Press 2 to RUN. ")
if pick == "1":
badGuy -= strike
print("Arms swinging wildly, a fist hits BAD GUY.")
print('"OUCH THAT HURT ME! I WILL SHOW YOU!"')
print("BAD GUY kicks your shin! OWIE!")
you -= strike
print()
pick2 = input("What do you do? Press 1 to ATTACK. Press 2 to cry. ")
if pick2 == "1":
print("Fury reigns as BAD GUY takes and gives jabs.")
input("Who will win? PRESS ENTER.")
while you > 0 and badGuy > 0: #final hit sequence to determine random winner
badGuy -= strike
print("BOOM!")
you -= strike
player.setHealth(you)
print("POW!!")
if you <= 0:
print()
lose()
elif badGuy <= 0:
print()
print(player.getName(), "WINS!!")
winMaze(player)
else:
print()
print("Crying won't save you. BAD GUY has no sympathy.")
lose()
else:
print("There is no place to run.")
lose()
def winMaze(player):
print('You survived maze \n')
print('the riddle answer is: "cat"')
print('GOING BACK TO MAIN CHAMBER')
print()
mainChamber(player)
def main():
intro()
player = createCharacter()
testHealth(player) #FOR DEBUGGING
bedroom(player)
main()
|
mit
|
HerlanAssis/Django-AulaOsvandoSantana
|
lib/python2.7/site-packages/setuptools/command/install.py
|
206
|
4052
|
import setuptools
import sys
import glob
from distutils.command.install import install as _install
from distutils.errors import DistutilsArgError
class install(_install):
"""Use easy_install to install the package, w/dependencies"""
user_options = _install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
]
boolean_options = _install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',
]
new_commands = [
('install_egg_info', lambda self: True),
('install_scripts', lambda self: True),
]
_nc = dict(new_commands)
def initialize_options(self):
_install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
self.no_compile = None # make DISTUTILS_DEBUG work right!
def finalize_options(self):
_install.finalize_options(self)
if self.root:
self.single_version_externally_managed = True
elif self.single_version_externally_managed:
if not self.root and not self.record:
raise DistutilsArgError(
"You must specify --record or --root when building system"
" packages"
)
def handle_extra_path(self):
if self.root or self.single_version_externally_managed:
# explicit backward-compatibility mode, allow extra_path to work
return _install.handle_extra_path(self)
# Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed)
self.path_file = None
self.extra_dirs = ''
def run(self):
# Explicit request for old-style install? Just do it
if self.old_and_unmanageable or self.single_version_externally_managed:
return _install.run(self)
# Attempt to detect whether we were called from setup() or by another
# command. If we were called by setup(), our caller will be the
# 'run_command' method in 'distutils.dist', and *its* caller will be
# the 'run_commands' method. If we were called any other way, our
# immediate caller *might* be 'run_command', but it won't have been
# called by 'run_commands'. This is slightly kludgy, but seems to
# work.
#
caller = sys._getframe(2)
caller_module = caller.f_globals.get('__name__','')
caller_name = caller.f_code.co_name
if caller_module != 'distutils.dist' or caller_name!='run_commands':
# We weren't called from the command line or setup(), so we
# should run in backward-compatibility mode to support bdist_*
# commands.
_install.run(self)
else:
self.do_egg_install()
def do_egg_install(self):
easy_install = self.distribution.get_command_class('easy_install')
cmd = easy_install(
self.distribution, args="x", root=self.root, record=self.record,
)
cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
cmd.always_copy_from = '.' # make sure local-dir eggs get installed
# pick up setup-dir .egg files only: no .egg-info
cmd.package_index.scan(glob.glob('*.egg'))
self.run_command('bdist_egg')
args = [self.distribution.get_command_obj('bdist_egg').egg_output]
if setuptools.bootstrap_install_from:
# Bootstrap self-installation of setuptools
args.insert(0, setuptools.bootstrap_install_from)
cmd.args = args
cmd.run()
setuptools.bootstrap_install_from = None
# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = [
cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
] + install.new_commands
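# Hedged sketch of the frame-introspection trick used in install.run() above;
# the helper name and default depth are invented for illustration.
def _caller_is_sketch(module_name, func_name, depth=2):
    frame = sys._getframe(depth)
    return (frame.f_globals.get('__name__', '') == module_name and
            frame.f_code.co_name == func_name)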
|
mit
|
hynnet/hiwifi-openwrt-HC5661-HC5761
|
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/unittest/test/test_functiontestcase.py
|
115
|
5534
|
import unittest
from .support import LoggingResult
class Test_FunctionTestCase(unittest.TestCase):
# "Return the number of tests represented by the this test object. For
# TestCase instances, this will always be 1"
def test_countTestCases(self):
test = unittest.FunctionTestCase(lambda: None)
self.assertEqual(test.countTestCases(), 1)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if setUp() raises
# an exception.
def test_run_call_order__error_in_setUp(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
raise RuntimeError('raised by setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'addError', 'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test raises
# an error (as opposed to a failure).
def test_run_call_order__error_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
raise RuntimeError('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addError', 'tearDown',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test signals
# a failure (as opposed to an error).
def test_run_call_order__failure_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
self.fail('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if tearDown() raises
# an exception.
def test_run_call_order__error_in_tearDown(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
raise RuntimeError('raised by tearDown')
expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "Return a string identifying the specific test case."
#
# Because of the vague nature of the docs, I'm not going to lock this
# test down too much. Really all that can be asserted is that the id()
    # will be a string (either 8-bit or unicode -- again, because the docs
# just say "string")
def test_id(self):
test = unittest.FunctionTestCase(lambda: None)
self.assertIsInstance(test.id(), basestring)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__no_docstring(self):
test = unittest.FunctionTestCase(lambda: None)
self.assertEqual(test.shortDescription(), None)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
def test_shortDescription__singleline_docstring(self):
desc = "this tests foo"
test = unittest.FunctionTestCase(lambda: None, description=desc)
self.assertEqual(test.shortDescription(), "this tests foo")
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
|
opendreambox/python-coherence
|
coherence/json.py
|
5
|
3334
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
import simplejson as json
from twisted.web import resource,static
from twisted.internet import defer
from coherence import log
class JsonInterface(resource.Resource,log.Loggable):
logCategory = 'json'
#isLeaf = False
def __init__(self, controlpoint):
self.controlpoint = controlpoint
self.controlpoint.coherence.add_web_resource('json',
self)
self.children = {}
def render_GET(self,request):
d = defer.maybeDeferred(self.do_the_render,request)
return d
def render_POST(self,request):
d = defer.maybeDeferred(self.do_the_render,request)
return d
def getChildWithDefault(self,path,request):
self.info('getChildWithDefault, %s, %s, %s %s %r' % (request.method, path, request.uri, request.client,request.args))
#return self.do_the_render(request)
d = defer.maybeDeferred(self.do_the_render,request)
return d
def do_the_render(self,request):
self.warning('do_the_render, %s, %s, %s %r %s' % (request.method, request.path,request.uri, request.args, request.client))
msg = "Houston, we've got a problem"
path = request.path.split('/')
path = path[2:]
self.warning('path %r' % path)
if request.method in ('GET','POST'):
request.postpath = None
if request.method == 'GET':
if path[0] == 'devices':
return self.list_devices(request)
else:
device = self.controlpoint.get_device_with_id(path[0])
                if device is not None:
                    service = device.get_service_by_type(path[1])
                    if service is not None:
                        action = service.get_action(path[2])
                        if action is not None:
return self.call_action(action,request)
else:
msg = "action %r on service type %r for device %r not found" % (path[2],path[1],path[0])
else:
msg = "service type %r for device %r not found" % (path[1],path[0])
else:
msg = "device with id %r not found" % path[0]
request.setResponseCode(404,message=msg)
return static.Data("<html><p>%s</p></html>" % msg,'text/html')
def list_devices(self,request):
devices = []
for device in self.controlpoint.get_devices():
devices.append(device.as_dict())
return static.Data(json.dumps(devices),'application/json')
def call_action(self,action,request):
kwargs = {}
for entry,value_list in request.args.items():
kwargs[entry] = unicode(value_list[0])
def to_json(result):
self.warning("to_json")
return static.Data(json.dumps(result),'application/json')
def fail(f):
request.setResponseCode(404)
return static.Data("<html><p>Houston, we've got a problem</p></html>",'text/html')
d = action.call(**kwargs)
d.addCallback(to_json)
d.addErrback(fail)
return d
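# Illustrative URL layout served by do_the_render() above (paths invented):
#   GET /json/devices                            -> JSON list of known devices
#   GET /json/<device-id>/<service-type>/<action>?arg=value
#                                                -> invoke the action, JSON reply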
|
mit
|
BehavioralInsightsTeam/edx-platform
|
cms/djangoapps/contentstore/views/tests/test_access.py
|
21
|
1669
|
"""
Tests access.py
"""
from django.contrib.auth.models import User
from django.test import TestCase
from opaque_keys.edx.locator import CourseLocator
from contentstore.views.access import get_user_role
from student.auth import add_users
from student.roles import CourseInstructorRole, CourseStaffRole
from student.tests.factories import AdminFactory
class RolesTest(TestCase):
"""
Tests for lti user role serialization.
"""
def setUp(self):
""" Test case setup """
super(RolesTest, self).setUp()
self.global_admin = AdminFactory()
self.instructor = User.objects.create_user('testinstructor', '[email protected]', 'foo')
self.staff = User.objects.create_user('teststaff', '[email protected]', 'foo')
self.course_key = CourseLocator('mitX', '101', 'test')
def test_get_user_role_instructor(self):
"""
Verifies if user is instructor.
"""
add_users(self.global_admin, CourseInstructorRole(self.course_key), self.instructor)
self.assertEqual(
'instructor',
get_user_role(self.instructor, self.course_key)
)
add_users(self.global_admin, CourseStaffRole(self.course_key), self.staff)
self.assertEqual(
'instructor',
get_user_role(self.instructor, self.course_key)
)
def test_get_user_role_staff(self):
"""
Verifies if user is staff.
"""
add_users(self.global_admin, CourseStaffRole(self.course_key), self.staff)
self.assertEqual(
'staff',
get_user_role(self.staff, self.course_key)
)
|
agpl-3.0
|
alaski/nova
|
nova/tests/unit/compute/test_compute_xen.py
|
4
|
3115
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for expectations of behaviour from the Xen driver."""
import mock
from oslo_utils import importutils
from nova.compute import power_state
import nova.conf
from nova import context
from nova import objects
from nova.objects import instance as instance_obj
from nova.tests.unit.compute import eventlet_utils
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.xenapi import stubs
from nova.virt.xenapi import vm_utils
CONF = nova.conf.CONF
class ComputeXenTestCase(stubs.XenAPITestBaseNoDB):
def setUp(self):
super(ComputeXenTestCase, self).setUp()
self.flags(compute_driver='xenapi.XenAPIDriver')
self.flags(connection_url='test_url',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
self.compute = importutils.import_object(CONF.compute_manager)
# execute power syncing synchronously for testing:
self.compute._sync_power_pool = eventlet_utils.SyncPool()
def test_sync_power_states_instance_not_found(self):
db_instance = fake_instance.fake_db_instance()
ctxt = context.get_admin_context()
instance_list = instance_obj._make_instance_list(ctxt,
objects.InstanceList(), [db_instance], None)
instance = instance_list[0]
@mock.patch.object(vm_utils, 'lookup')
@mock.patch.object(objects.InstanceList, 'get_by_host')
@mock.patch.object(self.compute.driver, 'get_num_instances')
@mock.patch.object(self.compute, '_sync_instance_power_state')
def do_test(mock_compute_sync_powerstate,
mock_compute_get_num_instances,
mock_instance_list_get_by_host,
mock_vm_utils_lookup):
mock_instance_list_get_by_host.return_value = instance_list
mock_compute_get_num_instances.return_value = 1
mock_vm_utils_lookup.return_value = None
self.compute._sync_power_states(ctxt)
mock_instance_list_get_by_host.assert_called_once_with(
ctxt, self.compute.host, expected_attrs=[], use_slave=True)
mock_compute_get_num_instances.assert_called_once_with()
mock_compute_sync_powerstate.assert_called_once_with(
ctxt, instance, power_state.NOSTATE, use_slave=True)
mock_vm_utils_lookup.assert_called_once_with(
self.compute.driver._session, instance['name'],
False)
do_test()
|
apache-2.0
|
takeshineshiro/nova
|
nova/network/security_group/security_group_base.py
|
60
|
9049
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
from oslo_config import cfg
from nova import exception
from nova.i18n import _
from nova import utils
CONF = cfg.CONF
class SecurityGroupBase(object):
def __init__(self, skip_policy_check=False):
self.skip_policy_check = skip_policy_check
def parse_cidr(self, cidr):
if cidr:
try:
cidr = urllib.unquote(cidr).decode()
except Exception as e:
self.raise_invalid_cidr(cidr, e)
if not utils.is_valid_cidr(cidr):
self.raise_invalid_cidr(cidr)
return cidr
else:
return '0.0.0.0/0'
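    # Illustrative parse_cidr() behaviour (values invented): a missing cidr
    # falls back to '0.0.0.0/0', and a URL-quoted value such as
    # '10.0.0.0%2F24' is unquoted to '10.0.0.0/24' before validation.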
@staticmethod
def new_group_ingress_rule(grantee_group_id, protocol, from_port,
to_port):
return SecurityGroupBase._new_ingress_rule(
protocol, from_port, to_port, group_id=grantee_group_id)
@staticmethod
def new_cidr_ingress_rule(grantee_cidr, protocol, from_port, to_port):
return SecurityGroupBase._new_ingress_rule(
protocol, from_port, to_port, cidr=grantee_cidr)
@staticmethod
def _new_ingress_rule(ip_protocol, from_port, to_port,
group_id=None, cidr=None):
values = {}
if group_id:
values['group_id'] = group_id
# Open everything if an explicit port range or type/code are not
# specified, but only if a source group was specified.
ip_proto_upper = ip_protocol.upper() if ip_protocol else ''
if (ip_proto_upper == 'ICMP' and
from_port is None and to_port is None):
from_port = -1
to_port = -1
elif (ip_proto_upper in ['TCP', 'UDP'] and from_port is None
and to_port is None):
from_port = 1
to_port = 65535
elif cidr:
values['cidr'] = cidr
if ip_protocol and from_port is not None and to_port is not None:
ip_protocol = str(ip_protocol)
try:
# Verify integer conversions
from_port = int(from_port)
to_port = int(to_port)
except ValueError:
if ip_protocol.upper() == 'ICMP':
raise exception.InvalidInput(reason=_("Type and"
" Code must be integers for ICMP protocol type"))
else:
raise exception.InvalidInput(reason=_("To and From ports "
"must be integers"))
if ip_protocol.upper() not in ['TCP', 'UDP', 'ICMP']:
raise exception.InvalidIpProtocol(protocol=ip_protocol)
# Verify that from_port must always be less than
# or equal to to_port
if (ip_protocol.upper() in ['TCP', 'UDP'] and
(from_port > to_port)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Former value cannot"
" be greater than the later")
# Verify valid TCP, UDP port ranges
if (ip_protocol.upper() in ['TCP', 'UDP'] and
(from_port < 1 or to_port > 65535)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Valid TCP ports should"
" be between 1-65535")
# Verify ICMP type and code
if (ip_protocol.upper() == "ICMP" and
(from_port < -1 or from_port > 255 or
to_port < -1 or to_port > 255)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="For ICMP, the"
" type:code must be valid")
values['protocol'] = ip_protocol
values['from_port'] = from_port
values['to_port'] = to_port
else:
# If cidr based filtering, protocol and ports are mandatory
if cidr:
return None
return values
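    # Illustrative _new_ingress_rule() result (values invented): a TCP rule
    # from a CIDR with an explicit port range yields
    #   {'cidr': '10.0.0.0/24', 'protocol': 'tcp',
    #    'from_port': 22, 'to_port': 22}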
def create_security_group_rule(self, context, security_group, new_rule):
if self.rule_exists(security_group, new_rule):
msg = (_('This rule already exists in group %s') %
new_rule['parent_group_id'])
self.raise_group_already_exists(msg)
return self.add_rules(context, new_rule['parent_group_id'],
security_group['name'],
[new_rule])[0]
def rule_exists(self, security_group, new_rule):
"""Indicates whether the specified rule is already
defined in the given security group.
"""
for rule in security_group['rules']:
keys = ('group_id', 'cidr', 'from_port', 'to_port', 'protocol')
for key in keys:
if rule.get(key) != new_rule.get(key):
break
else:
return rule.get('id') or True
return False
def validate_property(self, value, property, allowed):
pass
def ensure_default(self, context):
pass
def trigger_handler(self, event, *args):
pass
def trigger_rules_refresh(self, context, id):
"""Called when a rule is added to or removed from a security_group."""
pass
def trigger_members_refresh(self, context, group_ids):
"""Called when a security group gains a new or loses a member.
Sends an update request to each compute node for each instance for
which this is relevant.
"""
pass
def populate_security_groups(self, instance, security_groups):
"""Called when populating the database for an instances
security groups.
"""
raise NotImplementedError()
def create_security_group(self, context, name, description):
raise NotImplementedError()
def update_security_group(self, context, security_group,
name, description):
raise NotImplementedError()
def get(self, context, name=None, id=None, map_exception=False):
raise NotImplementedError()
def list(self, context, names=None, ids=None, project=None,
search_opts=None):
raise NotImplementedError()
def destroy(self, context, security_group):
raise NotImplementedError()
def add_rules(self, context, id, name, vals):
raise NotImplementedError()
def remove_rules(self, context, security_group, rule_ids):
raise NotImplementedError()
def get_rule(self, context, id):
raise NotImplementedError()
def get_instance_security_groups(self, context, instance_uuid,
detailed=False):
raise NotImplementedError()
def add_to_instance(self, context, instance, security_group_name):
"""Add security group to the instance.
:param context: The request context.
:param instance: nova.objects.instance.Instance object.
:param security_group_name: security group name to add
"""
raise NotImplementedError()
def remove_from_instance(self, context, instance, security_group_name):
"""Remove the security group associated with the instance.
:param context: The request context.
:param instance: nova.objects.instance.Instance object.
:param security_group_name: security group name to remove
"""
raise NotImplementedError()
@staticmethod
def raise_invalid_property(msg):
raise exception.Invalid(msg)
@staticmethod
def raise_group_already_exists(msg):
raise exception.Invalid(msg)
@staticmethod
def raise_invalid_group(msg):
raise exception.Invalid(msg)
@staticmethod
def raise_invalid_cidr(cidr, decoding_exception=None):
raise exception.InvalidCidr(cidr=cidr)
@staticmethod
def raise_over_quota(msg):
raise exception.SecurityGroupLimitExceeded(msg)
@staticmethod
def raise_not_found(msg):
raise exception.SecurityGroupNotFound(msg)
|
apache-2.0
|
pramasoul/micropython
|
tests/extmod/framebuf1.py
|
15
|
2168
|
try:
import framebuf
except ImportError:
print("SKIP")
raise SystemExit
w = 5
h = 16
size = w * h // 8
buf = bytearray(size)
maps = {
framebuf.MONO_VLSB: "MONO_VLSB",
framebuf.MONO_HLSB: "MONO_HLSB",
framebuf.MONO_HMSB: "MONO_HMSB",
}
for mapping in maps.keys():
for x in range(size):
buf[x] = 0
fbuf = framebuf.FrameBuffer(buf, w, h, mapping)
print(maps[mapping])
# access as buffer
print(memoryview(fbuf)[0])
# fill
fbuf.fill(1)
print(buf)
fbuf.fill(0)
print(buf)
# put pixel
fbuf.pixel(0, 0, 1)
fbuf.pixel(4, 0, 1)
fbuf.pixel(0, 15, 1)
fbuf.pixel(4, 15, 1)
print(buf)
# clear pixel
fbuf.pixel(4, 15, 0)
print(buf)
# get pixel
print(fbuf.pixel(0, 0), fbuf.pixel(1, 1))
# hline
fbuf.fill(0)
fbuf.hline(0, 1, w, 1)
print("hline", buf)
# vline
fbuf.fill(0)
fbuf.vline(1, 0, h, 1)
print("vline", buf)
# rect
fbuf.fill(0)
fbuf.rect(1, 1, 3, 3, 1)
print("rect", buf)
# fill rect
fbuf.fill(0)
fbuf.fill_rect(0, 0, 0, 3, 1) # zero width, no-operation
fbuf.fill_rect(1, 1, 3, 3, 1)
print("fill_rect", buf)
# line
fbuf.fill(0)
fbuf.line(1, 1, 3, 3, 1)
print("line", buf)
# line steep negative gradient
fbuf.fill(0)
fbuf.line(3, 3, 2, 1, 1)
print("line", buf)
# scroll
fbuf.fill(0)
fbuf.pixel(2, 7, 1)
fbuf.scroll(0, 1)
print(buf)
fbuf.scroll(0, -2)
print(buf)
fbuf.scroll(1, 0)
print(buf)
fbuf.scroll(-1, 0)
print(buf)
fbuf.scroll(2, 2)
print(buf)
# print text
fbuf.fill(0)
fbuf.text("hello", 0, 0, 1)
print(buf)
fbuf.text("hello", 0, 0, 0) # clear
print(buf)
# char out of font range set to chr(127)
fbuf.text(str(chr(31)), 0, 0)
print(buf)
print()
# test invalid constructor, and stride argument
try:
fbuf = framebuf.FrameBuffer(buf, w, h, -1, w)
except ValueError:
print("ValueError")
# test legacy constructor
fbuf = framebuf.FrameBuffer1(buf, w, h)
fbuf = framebuf.FrameBuffer1(buf, w, h, w)
print(framebuf.MVLSB == framebuf.MONO_VLSB)
|
mit
|
anthonybishopric/pyboxfs
|
setup.py
|
1
|
2401
|
#!/usr/bin/env python
#from distribute_setup import use_setuptools
#use_setuptools()
from setuptools import setup
import sys
PY3 = sys.version_info >= (3,)
VERSION = "0.4.1"
COMMANDS = ['fscat',
'fscp',
'fsinfo',
'fsls',
'fsmv',
'fsrm',
'fsserve',
'fstree',
'fsmkdir',
'fsmount']
classifiers = [
"Development Status :: 5 - Production/Stable",
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: System :: Filesystems',
]
long_desc = """Pyfilesystem is a module that provides a simplified common interface to many types of filesystem. Filesystems exposed via Pyfilesystem can also be served over the network, or 'mounted' on the native filesystem.
Even if you only need to work with file and directories on the local hard-drive, Pyfilesystem can simplify your code and make it more robust -- with the added advantage that you can change where the files are located by changing a single line of code.
"""
extra = {}
if PY3:
extra["use_2to3"] = True
setup(install_requires=['distribute', 'six'],
name='fs',
version=VERSION,
description="Filesystem abstraction",
long_description=long_desc,
license = "BSD",
author="Will McGugan",
author_email="[email protected]",
url="http://code.google.com/p/pyfilesystem/",
download_url="http://code.google.com/p/pyfilesystem/downloads/list",
platforms = ['any'],
packages=['fs',
'fs.expose',
'fs.expose.dokan',
'fs.expose.fuse',
'fs.expose.wsgi',
'fs.tests',
'fs.wrapfs',
'fs.osfs',
'fs.contrib',
'fs.contrib.bigfs',
'fs.contrib.boxfs',
'fs.contrib.davfs',
'fs.contrib.tahoelafs',
'fs.commands'],
scripts=['fs/commands/%s' % command for command in COMMANDS],
classifiers=classifiers,
**extra
)
|
bsd-3-clause
|
modsy/incubator-airflow
|
airflow/operators/check_operator.py
|
5
|
8837
|
from builtins import zip
from builtins import str
import logging
from airflow.exceptions import AirflowException
from airflow.hooks import BaseHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class CheckOperator(BaseOperator):
"""
Performs checks against a db. The ``CheckOperator`` expects
a sql query that will return a single row. Each value on that
first row is evaluated using python ``bool`` casting. If any of the
values return ``False`` the check is failed and errors out.
    Note that Python bool casting evaluates the following as ``False``:
* False
* 0
* Empty string (``""``)
* Empty list (``[]``)
* Empty dictionary or set (``{}``)
    Given a query like ``SELECT COUNT(*) FROM foo``, it will fail only if
    the count ``== 0``. You can craft a much more complex query that could,
    for instance, check that the table has the same number of rows as
    the source table upstream, or that the count of today's partition is
    greater than yesterday's partition, or that a set of metrics are less
    than 3 standard deviations away from the 7 day average.
    This operator can be used as a data quality check in your pipeline, and
    depending on where you put it in your DAG, you have the choice to
    stop the critical path, preventing the publishing of dubious data, or
    to run it on the side and receive email alerts without stopping the
    progress of the DAG.
    Note that this is an abstract class and ``get_db_hook`` needs to be
    defined; ``get_db_hook`` returns a hook that gets a single record from
    an external source.
:param sql: the sql to be executed
:type sql: string
"""
template_fields = ('sql',)
template_ext = ('.hql', '.sql',)
ui_color = '#fff7e6'
@apply_defaults
def __init__(
self, sql,
conn_id=None,
*args, **kwargs):
super(CheckOperator, self).__init__(*args, **kwargs)
self.conn_id = conn_id
self.sql = sql
def execute(self, context=None):
logging.info('Executing SQL check: ' + self.sql)
records = self.get_db_hook().get_first(self.sql)
logging.info("Record: " + str(records))
if not records:
raise AirflowException("The query returned None")
elif not all([bool(r) for r in records]):
exceptstr = "Test failed.\nQuery:\n{q}\nResults:\n{r!s}"
raise AirflowException(exceptstr.format(q=self.sql, r=records))
logging.info("Success.")
def get_db_hook(self):
return BaseHook.get_hook(conn_id=self.conn_id)
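# A minimal usage sketch for CheckOperator (illustrative, not part of this
# module): it assumes an existing DAG object ``dag`` and a concrete subclass
# such as PrestoCheckOperator that implements get_db_hook; the task id and
# table name are hypothetical.
#
#   from airflow.operators.presto_check_operator import PrestoCheckOperator
#
#   check_foo = PrestoCheckOperator(
#       task_id='check_foo_not_empty',
#       sql='SELECT COUNT(*) FROM foo',
#       dag=dag)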
def _convert_to_float_if_possible(s):
'''
A small helper function to convert a string to a numeric value
if appropriate
:param s: the string to be converted
:type s: str
'''
try:
ret = float(s)
except (ValueError, TypeError):
ret = s
return ret
class ValueCheckOperator(BaseOperator):
"""
Performs a simple value check using sql code.
    Note that this is an abstract class and ``get_db_hook`` needs to be
    defined; ``get_db_hook`` returns a hook that gets a single record from
    an external source.
:param sql: the sql to be executed
:type sql: string
"""
__mapper_args__ = {
'polymorphic_identity': 'ValueCheckOperator'
}
template_fields = ('sql',)
template_ext = ('.hql', '.sql',)
ui_color = '#fff7e6'
@apply_defaults
def __init__(
self, sql, pass_value, tolerance=None,
conn_id=None,
*args, **kwargs):
super(ValueCheckOperator, self).__init__(*args, **kwargs)
self.sql = sql
self.conn_id = conn_id
self.pass_value = _convert_to_float_if_possible(pass_value)
tol = _convert_to_float_if_possible(tolerance)
self.tol = tol if isinstance(tol, float) else None
self.is_numeric_value_check = isinstance(self.pass_value, float)
self.has_tolerance = self.tol is not None
def execute(self, context=None):
logging.info('Executing SQL check: ' + self.sql)
records = self.get_db_hook().get_first(self.sql)
if not records:
raise AirflowException("The query returned None")
except_temp = ("Test failed.\nPass value:{self.pass_value}\n"
"Query:\n{self.sql}\nResults:\n{records!s}")
if not self.is_numeric_value_check:
tests = [str(r) == self.pass_value for r in records]
        else:
try:
num_rec = [float(r) for r in records]
except (ValueError, TypeError) as e:
cvestr = "Converting a result to float failed.\n"
raise AirflowException(cvestr+except_temp.format(**locals()))
if self.has_tolerance:
tests = [
r / (1 + self.tol) <= self.pass_value <= r / (1 - self.tol)
for r in num_rec]
else:
tests = [r == self.pass_value for r in num_rec]
if not all(tests):
raise AirflowException(except_temp.format(**locals()))
def get_db_hook(self):
return BaseHook.get_hook(conn_id=self.conn_id)
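# A sketch of the tolerance semantics above (hypothetical numbers): with
# pass_value=100 and tolerance=0.1, a returned value r passes when
# r / 1.1 <= 100 <= r / 0.9, i.e. 90 <= r <= 110.
#
#   value_check = SomeValueCheckSubclass(  # hypothetical concrete subclass
#       task_id='check_avg_metric',
#       sql='SELECT AVG(metric) FROM foo',
#       pass_value=100,
#       tolerance=0.1,
#       dag=dag)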
class IntervalCheckOperator(BaseOperator):
"""
Checks that the values of metrics given as SQL expressions are within
a certain tolerance of the ones from days_back before.
    Note that this is an abstract class and ``get_db_hook`` needs to be
    defined; ``get_db_hook`` returns a hook that gets a single record from
    an external source.
    :param table: the table name
    :type table: str
    :param days_back: number of days between ds and the ds we want to check
        against. Defaults to 7 days
    :type days_back: int
    :param metrics_thresholds: a dictionary of ratios indexed by metrics
    :type metrics_thresholds: dict
"""
__mapper_args__ = {
'polymorphic_identity': 'IntervalCheckOperator'
}
template_fields = ('sql1', 'sql2')
template_ext = ('.hql', '.sql',)
ui_color = '#fff7e6'
@apply_defaults
def __init__(
self, table, metrics_thresholds,
date_filter_column='ds', days_back=-7,
conn_id=None,
*args, **kwargs):
super(IntervalCheckOperator, self).__init__(*args, **kwargs)
self.table = table
self.metrics_thresholds = metrics_thresholds
self.metrics_sorted = sorted(metrics_thresholds.keys())
self.date_filter_column = date_filter_column
self.days_back = -abs(days_back)
self.conn_id = conn_id
sqlexp = ', '.join(self.metrics_sorted)
sqlt = ("SELECT {sqlexp} FROM {table}"
" WHERE {date_filter_column}=").format(**locals())
self.sql1 = sqlt + "'{{ ds }}'"
self.sql2 = sqlt + "'{{ macros.ds_add(ds, "+str(self.days_back)+") }}'"
def execute(self, context=None):
hook = self.get_db_hook()
logging.info('Executing SQL check: ' + self.sql2)
row2 = hook.get_first(self.sql2)
logging.info('Executing SQL check: ' + self.sql1)
row1 = hook.get_first(self.sql1)
        if not row2:
            raise AirflowException("The query {q} returned None".format(q=self.sql2))
        if not row1:
            raise AirflowException("The query {q} returned None".format(q=self.sql1))
current = dict(zip(self.metrics_sorted, row1))
reference = dict(zip(self.metrics_sorted, row2))
ratios = {}
test_results = {}
rlog = "Ratio for {0}: {1} \n Ratio threshold : {2}"
fstr = "'{k}' check failed. {r} is above {tr}"
estr = "The following tests have failed:\n {0}"
countstr = "The following {j} tests out of {n} failed:"
for m in self.metrics_sorted:
if current[m] == 0 or reference[m] == 0:
ratio = None
else:
ratio = float(max(current[m], reference[m])) / \
min(current[m], reference[m])
logging.info(rlog.format(m, ratio, self.metrics_thresholds[m]))
ratios[m] = ratio
test_results[m] = ratio < self.metrics_thresholds[m]
if not all(test_results.values()):
failed_tests = [it[0] for it in test_results.items() if not it[1]]
j = len(failed_tests)
n = len(self.metrics_sorted)
logging.warning(countstr.format(**locals()))
for k in failed_tests:
logging.warning(fstr.format(k=k, r=ratios[k],
tr=self.metrics_thresholds[k]))
raise AirflowException(estr.format(", ".join(failed_tests)))
logging.info("All tests have passed")
def get_db_hook(self):
return BaseHook.get_hook(conn_id=self.conn_id)
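# A usage sketch for IntervalCheckOperator (illustrative; the subclass, table,
# metrics and DAG object are hypothetical). A threshold of 1.5 means the
# current value and the value from days_back days ago may differ by at most a
# factor of 1.5 in either direction, since the check compares max/min ratios.
#
#   interval_check = SomeIntervalCheckSubclass(
#       task_id='check_metrics_week_over_week',
#       table='my_metrics_table',
#       metrics_thresholds={'COUNT(*)': 1.5, 'SUM(revenue)': 1.2},
#       days_back=-7,
#       dag=dag)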
|
apache-2.0
|
wolverineav/horizon
|
openstack_dashboard/test/integration_tests/tests/test_volumes.py
|
5
|
16332
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from openstack_dashboard.test.integration_tests import decorators
from openstack_dashboard.test.integration_tests import helpers
from openstack_dashboard.test.integration_tests.regions import messages
class TestVolumesBasic(helpers.TestCase):
"""Login as demo user"""
VOLUME_NAME = helpers.gen_random_resource_name("volume")
@property
def volumes_page(self):
return self.home_pg.go_to_compute_volumes_volumespage()
def test_volume_create_edit_delete(self):
"""This test case checks create, edit, delete volume functionality:
Steps:
1. Login to Horizon Dashboard
2. Navigate to Project -> Compute -> Volumes page
3. Create new volume
4. Check that the volume is in the list
5. Check that no Error messages present
6. Edit the volume
7. Check that the volume is still in the list
8. Check that no Error messages present
9. Delete the volume via proper page (depends on user)
10. Check that the volume is absent in the list
11. Check that no Error messages present
"""
volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
volumes_page.create_volume(self.VOLUME_NAME)
self.assertTrue(
volumes_page.find_message_and_dismiss(messages.INFO))
self.assertFalse(
volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_present(self.VOLUME_NAME))
self.assertTrue(volumes_page.is_volume_status(self.VOLUME_NAME,
'Available'))
new_name = "edited_" + self.VOLUME_NAME
volumes_page.edit_volume(self.VOLUME_NAME, new_name, "description")
self.assertTrue(
volumes_page.find_message_and_dismiss(messages.INFO))
self.assertFalse(
volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_present(new_name))
self.assertTrue(volumes_page.is_volume_status(new_name, 'Available'))
volumes_page = self.volumes_page
volumes_page.delete_volume(new_name)
self.assertTrue(
volumes_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_deleted(new_name))
# NOTE(tsufiev): A short regression test on bug 1553314: we try to
# re-open 'Create Volume' button after the volume was deleted. If the
# regression occurs, the form won't appear (because link is going to be
# invalid in this case). Give JavaScript callbacks an additional second
# to do all the job and possibly cause the regression.
if not isinstance(self, helpers.AdminTestCase):
time.sleep(1)
form = volumes_page.volumes_table.create_volume()
form.cancel()
def test_volumes_pagination(self):
"""This test checks volumes pagination
Steps:
1) Login to Horizon Dashboard
2) Go to Project -> Compute -> Volumes -> Volumes tab and create
three volumes
3) Navigate to user settings page
4) Change 'Items Per Page' value to 1
5) Go to Project -> Compute -> Volumes -> Volumes tab or
Admin -> System -> Volumes -> Volumes tab (depends on user)
6) Check that only 'Next' link is available, only one volume is
available (and it has correct name)
7) Click 'Next' and check that both 'Prev' and 'Next' links are
available, only one volume is available (and it has correct name)
8) Click 'Next' and check that only 'Prev' link is available,
only one volume is visible (and it has correct name)
        9) Click 'Prev' and check result (should be the same as for step 7)
        10) Click 'Prev' and check result (should be the same as for step 6)
11) Go to user settings page and restore 'Items Per Page'
12) Delete created volumes
"""
volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
count = 3
items_per_page = 1
volumes_names = ["{0}_{1}".format(self.VOLUME_NAME, i) for i in
range(count)]
for volume_name in volumes_names:
volumes_page.create_volume(volume_name)
volumes_page.find_message_and_dismiss(messages.INFO)
self.assertTrue(volumes_page.is_volume_present(volume_name))
first_page_definition = {'Next': True, 'Prev': False,
'Count': items_per_page,
'Names': [volumes_names[2]]}
second_page_definition = {'Next': True, 'Prev': True,
'Count': items_per_page,
'Names': [volumes_names[1]]}
third_page_definition = {'Next': False, 'Prev': True,
'Count': items_per_page,
'Names': [volumes_names[0]]}
settings_page = self.home_pg.go_to_settings_usersettingspage()
settings_page.change_pagesize(items_per_page)
settings_page.find_message_and_dismiss(messages.SUCCESS)
volumes_page = self.volumes_page
volumes_page.volumes_table.assert_definition(first_page_definition)
volumes_page.volumes_table.turn_next_page()
volumes_page.volumes_table.assert_definition(second_page_definition)
volumes_page.volumes_table.turn_next_page()
volumes_page.volumes_table.assert_definition(third_page_definition)
volumes_page.volumes_table.turn_prev_page()
volumes_page.volumes_table.assert_definition(second_page_definition)
volumes_page.volumes_table.turn_prev_page()
volumes_page.volumes_table.assert_definition(first_page_definition)
settings_page = self.home_pg.go_to_settings_usersettingspage()
settings_page.change_pagesize()
settings_page.find_message_and_dismiss(messages.SUCCESS)
volumes_page = self.volumes_page
volumes_page.delete_volumes(volumes_names)
volumes_page.find_message_and_dismiss(messages.SUCCESS)
self.assertTrue(volumes_page.are_volumes_deleted(volumes_names))
class TestAdminVolumes(helpers.AdminTestCase, TestVolumesBasic):
"""Login as admin user"""
VOLUME_NAME = helpers.gen_random_resource_name("volume")
@property
def volumes_page(self):
return self.home_pg.go_to_system_volumes_volumespage()
class TestVolumesAdvanced(helpers.TestCase):
"""Login as demo user"""
VOLUME_NAME = helpers.gen_random_resource_name("volume")
@property
def volumes_page(self):
return self.home_pg.go_to_compute_volumes_volumespage()
@decorators.skip_because(bugs=['1584057'])
def test_manage_volume_attachments(self):
"""This test case checks attach/detach actions for volume
Steps:
1. Login to Horizon Dashboard as horizon user
2. Navigate to Project -> Compute -> Instances, create instance
3. Navigate to Project -> Compute -> Volumes, create volume
4. Attach volume to instance from step2
        5. Check volume status and link to instance
6. Detach volume from instance
7. Check volume status
8. Delete volume and instance
"""
instance_name = helpers.gen_random_resource_name('instance')
instances_page = self.home_pg.go_to_compute_instancespage()
instances_page.create_instance(instance_name)
instances_page.find_message_and_dismiss(messages.SUCCESS)
self.assertFalse(
instances_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(instances_page.is_instance_active(instance_name))
volumes_page = self.volumes_page
volumes_page.create_volume(self.VOLUME_NAME)
volumes_page.find_message_and_dismiss(messages.INFO)
self.assertFalse(volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_status(self.VOLUME_NAME,
'Available'))
volumes_page.attach_volume_to_instance(self.VOLUME_NAME, instance_name)
volumes_page.find_message_and_dismiss(messages.INFO)
self.assertFalse(volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_status(self.VOLUME_NAME,
'In-use'))
self.assertTrue(
volumes_page.is_volume_attached_to_instance(self.VOLUME_NAME,
instance_name))
volumes_page.detach_volume_from_instance(self.VOLUME_NAME,
instance_name)
volumes_page.find_message_and_dismiss(messages.SUCCESS)
self.assertFalse(volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_status(self.VOLUME_NAME,
'Available'))
volumes_page.delete_volume(self.VOLUME_NAME)
volumes_page.find_message_and_dismiss(messages.SUCCESS)
self.assertFalse(volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(volumes_page.is_volume_deleted(self.VOLUME_NAME))
instances_page = self.home_pg.go_to_compute_instancespage()
instances_page.delete_instance(instance_name)
instances_page.find_message_and_dismiss(messages.SUCCESS)
self.assertFalse(
instances_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(instances_page.is_instance_deleted(instance_name))
class TestVolumesActions(helpers.TestCase):
VOLUME_NAME = helpers.gen_random_resource_name("volume")
IMAGE_NAME = helpers.gen_random_resource_name("image")
INSTANCE_NAME = helpers.gen_random_resource_name("instance")
def setUp(self):
super(TestVolumesActions, self).setUp()
self.volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
self.volumes_page.create_volume(self.VOLUME_NAME)
self.assertTrue(
self.volumes_page.find_message_and_dismiss(messages.INFO))
self.assertFalse(
self.volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(self.volumes_page.is_volume_present(self.VOLUME_NAME))
self.assertTrue(self.volumes_page.is_volume_status(self.VOLUME_NAME,
'Available'))
def cleanup():
self.volumes_page.delete_volume(self.VOLUME_NAME)
self.assertTrue(
self.volumes_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
self.volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(
self.volumes_page.is_volume_deleted(self.VOLUME_NAME))
self.addCleanup(cleanup)
def test_volume_extend(self):
"""This test case checks extend volume functionality:
Steps:
1. Check current volume size
2. Extend volume
3. Check that no Error messages present
4. Check that the volume is still in the list
5. Check that the volume size is changed
"""
orig_size = self.volumes_page.get_size(self.VOLUME_NAME)
self.volumes_page.extend_volume(self.VOLUME_NAME, orig_size + 1)
self.assertTrue(
self.volumes_page.find_message_and_dismiss(messages.INFO))
self.assertFalse(
self.volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(self.volumes_page.is_volume_status(self.VOLUME_NAME,
'Available'))
new_size = self.volumes_page.get_size(self.VOLUME_NAME)
        self.assertLess(orig_size, new_size)
@decorators.skip_because(bugs=['1584057'])
def test_volume_upload_to_image(self):
"""This test case checks upload volume to image functionality:
Steps:
1. Upload volume to image with some disk format
2. Check that image is created
3. Check that no Error messages present
4. Delete the image
5. Repeat actions for all disk formats
"""
self.volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
all_formats = {"qcow2": u'QCOW2', "raw": u'Raw', "vdi": u'VDI',
"vmdk": u'VMDK'}
for disk_format in all_formats:
self.volumes_page.upload_volume_to_image(self.VOLUME_NAME,
self.IMAGE_NAME,
disk_format)
self.assertFalse(
self.volumes_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(self.volumes_page.is_volume_status(
self.VOLUME_NAME, 'Available'))
images_page = self.home_pg.go_to_compute_imagespage()
self.assertTrue(images_page.is_image_present(self.IMAGE_NAME))
self.assertTrue(images_page.is_image_active(self.IMAGE_NAME))
self.assertEqual(images_page.get_image_format(self.IMAGE_NAME),
all_formats[disk_format])
images_page.delete_image(self.IMAGE_NAME)
self.assertTrue(images_page.find_message_and_dismiss(
messages.SUCCESS))
self.assertFalse(images_page.find_message_and_dismiss(
messages.ERROR))
self.assertFalse(images_page.is_image_present(self.IMAGE_NAME))
self.volumes_page = \
self.home_pg.go_to_compute_volumes_volumespage()
def test_volume_launch_as_instance(self):
"""This test case checks launch volume as instance functionality:
Steps:
1. Launch volume as instance
2. Check that instance is created
3. Check that no Error messages present
4. Check that instance status is 'active'
5. Check that volume status is 'in use'
6. Delete instance
"""
self.volumes_page.launch_instance(self.VOLUME_NAME, self.INSTANCE_NAME)
self.assertTrue(
self.volumes_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
self.volumes_page.find_message_and_dismiss(messages.ERROR))
instances_page = self.home_pg.go_to_compute_instancespage()
self.assertTrue(instances_page.is_instance_active(self.INSTANCE_NAME))
self.volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
self.assertTrue(self.volumes_page.is_volume_status(self.VOLUME_NAME,
'In-use'))
self.assertIn(self.INSTANCE_NAME,
self.volumes_page.get_attach_instance(self.VOLUME_NAME))
instances_page = self.home_pg.go_to_compute_instancespage()
instances_page.delete_instance(self.INSTANCE_NAME)
self.assertTrue(
instances_page.find_message_and_dismiss(messages.SUCCESS))
self.assertFalse(
instances_page.find_message_and_dismiss(messages.ERROR))
self.assertTrue(instances_page.is_instance_deleted(self.INSTANCE_NAME))
self.volumes_page = self.home_pg.go_to_compute_volumes_volumespage()
|
apache-2.0
|
CapeSepias/owning-a-home
|
test/browser_testing/features/steps/steps_navigation.py
|
8
|
4553
|
# coding: utf-8
from behave import given, when, then
from hamcrest.core import assert_that, equal_to
from hamcrest.library.text.stringcontains import contains_string
from decorators import *
from pages.home import Home
from pages.base import Base
from pages.utils import Utils
# XPATH LOCATORS
# RELATIVE URL'S
HOME = 'index.html'
LC = 'loan-comparison'
LO = 'loan-options'
CONV = 'loan-options/conventional-loans'
ER = 'explore-rates'
FHA = 'loan-options/FHA-loans'
SPECIAL = 'loan-options/special-loan-programs'
# Journey links
KP = 'process'
PP = 'process/prepare'
PE = 'process/explore'
PC = 'process/compare'
PF = 'process/close'
PS = 'process/sources'
# FE
CD = 'closing-disclosure'
LE = 'loan-estimate'
# Form Resources
MC = 'mortgage-closing'
ME = 'mortgage-estimate'
# Map page names to their relative URLs to avoid a long if/elif chain.
PAGES = {
    'Owning a Home': HOME,
    'Loan Comparison': LC,
    'Loan Options': LO,
    'Rate Checker': ER,
    'Conventional Loan': CONV,
    'FHA Loan': FHA,
    'Special Loan Programs': SPECIAL,
    'Know the Process': KP,
    'Prepare to Shop': PP,
    'Explore Loan Options': PE,
    'Compare Loan Options': PC,
    'Get Ready to Close': PF,
    'Sources': PS,
    'Closing Disclosure': CD,
    'Loan Estimate': LE,
    'Mortgage Closing': MC,
    'Mortgage Estimate': ME,
}
@given(u'I navigate to the "{page_name}" page')
@handle_error
def step(context, page_name):
    if page_name not in PAGES:
        raise Exception(page_name + ' is NOT a valid page')
    context.base.go(PAGES[page_name])
    if page_name == 'Rate Checker':
        # Wait for the chart to load
        context.base.sleep(2)
        assert_that(context.rate_checker.is_chart_loaded(),
                    equal_to("Chart is loaded"))
@given(u'I navigate to the OAH Landing page')
@handle_error
def step(context):
context.base.go()
@when(u'I click on the "{link_name}" link')
@handle_error
def step(context, link_name):
# Click the requested tab
context.navigation.click_link(link_name)
@when(u'I click on the link with id "{link_id}"')
@handle_error
def step(context, link_id):
# Click the requested tab
context.navigation.click_link_with_id(link_id)
@then(u'I should see "{link_name}" displayed in the page title')
@handle_error
def step(context, link_name):
# Verify that the page title matches the link we clicked
page_title = context.base.get_page_title()
assert_that(page_title, contains_string(link_name))
@then(u'I should see the page scroll to the "{page_anchor}" section')
@handle_error
def step(context, page_anchor):
current_url = context.base.get_current_url()
assert_that(current_url, contains_string(page_anchor))
@then(u'I should be directed to the internal "{relative_url}" URL')
@handle_error
def step(context, relative_url):
actual_url = context.base.get_current_url()
expected_url = context.utils.build_url(context.base_url, relative_url)
assert_that(actual_url, equal_to(expected_url))
@then(u'I should be directed to the external "{full_url}" URL')
@handle_error
def step(context, full_url):
actual_url = context.base.get_current_url()
assert_that(actual_url, contains_string(full_url))
@then(u'I should be directed to the OAH Landing page')
@handle_error
def step(context):
actual_url = context.base.get_current_url()
expected_url = context.utils.build_url(context.base_url, '/')
assert_that(actual_url, equal_to(expected_url))
@then(u'I should see the "{relative_url}" URL with page title {page_title} open in a new tab')
@handle_error
def step(context, relative_url, page_title):
title = context.base.switch_to_new_tab(relative_url)
assert_that(title, contains_string(page_title))
@then(u'Links are working without 404 errors')
def links_working_without_404s(context):
    assert_that(context.navigation.check_links_for_404s(context.base_url),
                equal_to([]),
                'Broken links on <%s>' % context.base.get_current_url())
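# A feature-file sketch matching the step definitions above (Gherkin; the
# scenario is illustrative and the page name must be a key of PAGES):
#
#   Scenario: Navigate to the loan options page
#     Given I navigate to the "Loan Options" page
#     Then I should see "Loan Options" displayed in the page title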
|
cc0-1.0
|
konstruktoid/ansible-upstream
|
lib/ansible/module_utils/infinibox.py
|
135
|
3673
|
# -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Gregory Shulov <[email protected]>,2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
HAS_INFINISDK = True
try:
from infinisdk import InfiniBox, core
except ImportError:
HAS_INFINISDK = False
from functools import wraps
from os import environ
from os import path
def api_wrapper(func):
""" Catch API Errors Decorator"""
@wraps(func)
def __wrapper(*args, **kwargs):
module = args[0]
try:
return func(*args, **kwargs)
except core.exceptions.APICommandException as e:
module.fail_json(msg=e.message)
except core.exceptions.SystemNotFoundException as e:
module.fail_json(msg=e.message)
return __wrapper
@api_wrapper
def get_system(module):
"""Return System Object or Fail"""
box = module.params['system']
user = module.params.get('user', None)
password = module.params.get('password', None)
if user and password:
system = InfiniBox(box, auth=(user, password))
elif environ.get('INFINIBOX_USER') and environ.get('INFINIBOX_PASSWORD'):
system = InfiniBox(box, auth=(environ.get('INFINIBOX_USER'), environ.get('INFINIBOX_PASSWORD')))
elif path.isfile(path.expanduser('~') + '/.infinidat/infinisdk.ini'):
system = InfiniBox(box)
else:
module.fail_json(msg="You must set INFINIBOX_USER and INFINIBOX_PASSWORD environment variables or set username/password module arguments")
try:
system.login()
except Exception:
module.fail_json(msg="Infinibox authentication failed. Check your credentials")
return system
def infinibox_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
system=dict(required=True),
user=dict(),
password=dict(no_log=True),
)
def infinibox_required_together():
"""Return the default list used for the required_together argument to AnsibleModule"""
return [['user', 'password']]
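# A module-side usage sketch (illustrative; the extra ``name`` argument is an
# assumption, not part of this snippet):
#
#   from ansible.module_utils.basic import AnsibleModule
#
#   argument_spec = infinibox_argument_spec()
#   argument_spec.update(name=dict(required=True))
#   module = AnsibleModule(argument_spec,
#                          required_together=infinibox_required_together())
#   system = get_system(module)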
|
gpl-3.0
|
cloudera/hue
|
desktop/core/ext-py/Django-1.11.29/django/core/management/commands/dumpdata.py
|
72
|
8692
|
import warnings
from collections import OrderedDict
from django.apps import apps
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import parse_apps_and_model_labels
from django.db import DEFAULT_DB_ALIAS, router
class ProxyModelWarning(Warning):
pass
class Command(BaseCommand):
help = (
"Output the contents of the database as a fixture of the given format "
"(using each model's default manager unless --all is specified)."
)
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='app_label[.ModelName]', nargs='*',
help='Restricts dumped data to the specified app_label or app_label.ModelName.',
)
parser.add_argument(
'--format', default='json', dest='format',
help='Specifies the output serialization format for fixtures.',
)
parser.add_argument(
'--indent', default=None, dest='indent', type=int,
help='Specifies the indent level to use when pretty-printing output.',
)
parser.add_argument(
'--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to dump fixtures from. '
'Defaults to the "default" database.',
)
parser.add_argument(
'-e', '--exclude', dest='exclude', action='append', default=[],
help='An app_label or app_label.ModelName to exclude '
'(use multiple --exclude to exclude multiple apps/models).',
)
parser.add_argument(
'--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
help='Use natural foreign keys if they are available.',
)
parser.add_argument(
'--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
help='Use natural primary keys if they are available.',
)
parser.add_argument(
'-a', '--all', action='store_true', dest='use_base_manager', default=False,
help="Use Django's base manager to dump all models stored in the database, "
"including those that would otherwise be filtered or modified by a custom manager.",
)
parser.add_argument(
'--pks', dest='primary_keys',
help="Only dump objects with given primary keys. Accepts a comma-separated "
"list of keys. This option only works when you specify one model.",
)
parser.add_argument(
'-o', '--output', default=None, dest='output',
help='Specifies file to which the output is written.'
)
def handle(self, *app_labels, **options):
format = options['format']
indent = options['indent']
using = options['database']
excludes = options['exclude']
output = options['output']
show_traceback = options['traceback']
use_natural_foreign_keys = options['use_natural_foreign_keys']
use_natural_primary_keys = options['use_natural_primary_keys']
use_base_manager = options['use_base_manager']
pks = options['primary_keys']
if pks:
primary_keys = [pk.strip() for pk in pks.split(',')]
else:
primary_keys = []
excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)
if len(app_labels) == 0:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict(
(app_config, None) for app_config in apps.get_app_configs()
if app_config.models_module is not None and app_config not in excluded_apps
)
else:
if len(app_labels) > 1 and primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict()
for label in app_labels:
try:
app_label, model_label = label.split('.')
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
try:
model = app_config.get_model(model_label)
except LookupError:
raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
app_list_value = app_list.setdefault(app_config, [])
                    # We may have previously seen an "all models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need to add specific models to the list.
if app_list_value is not None:
if model not in app_list_value:
app_list_value.append(model)
except ValueError:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
# This is just an app - no model qualifier
app_label = label
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
app_list[app_config] = None
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
if format not in serializers.get_public_serializer_formats():
try:
serializers.get_serializer(format)
except serializers.SerializerDoesNotExist:
pass
raise CommandError("Unknown serialization format: %s" % format)
def get_objects(count_only=False):
"""
Collate the objects to be serialized. If count_only is True, just
count the number of objects to be serialized.
"""
models = serializers.sort_dependencies(app_list.items())
for model in models:
if model in excluded_models:
continue
if model._meta.proxy and model._meta.proxy_for_model not in models:
warnings.warn(
"%s is a proxy model and won't be serialized." % model._meta.label,
category=ProxyModelWarning,
)
if not model._meta.proxy and router.allow_migrate_model(using, model):
if use_base_manager:
objects = model._base_manager
else:
objects = model._default_manager
queryset = objects.using(using).order_by(model._meta.pk.name)
if primary_keys:
queryset = queryset.filter(pk__in=primary_keys)
if count_only:
yield queryset.order_by().count()
else:
for obj in queryset.iterator():
yield obj
try:
self.stdout.ending = None
progress_output = None
object_count = 0
# If dumpdata is outputting to stdout, there is no way to display progress
if (output and self.stdout.isatty() and options['verbosity'] > 0):
progress_output = self.stdout
object_count = sum(get_objects(count_only=True))
stream = open(output, 'w') if output else None
try:
serializers.serialize(
format, get_objects(), indent=indent,
use_natural_foreign_keys=use_natural_foreign_keys,
use_natural_primary_keys=use_natural_primary_keys,
stream=stream or self.stdout, progress_output=progress_output,
object_count=object_count,
)
finally:
if stream:
stream.close()
except Exception as e:
if show_traceback:
raise
raise CommandError("Unable to serialize database: %s" % e)
|
apache-2.0
|
intelligenia/django-cmsutils
|
cmsutils/search.py
|
3
|
2049
|
import re
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
        and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
def get_query(query_string, search_fields):
    ''' Returns a query that is a combination of Q objects. The combination
        aims to search for keywords within a model by testing the given search fields.
    '''
query = None # Query to search for every search term
terms = normalize_query(query_string)
for term in terms:
or_query = None # Query to search for a given term in each field
for field_name in search_fields:
q = Q(**{"%s__icontains" % field_name: term})
if or_query is None:
or_query = q
else:
or_query = or_query | q
if query is None:
query = or_query
else:
query = query & or_query
return query
'''
Example of how to use this code; your search view becomes as simple as this
(adapted from http://www.julienphalip.com/blog/2008/08/16/adding-search-django-site-snap/):
def search(request):
query_string = ''
found_entries = None
if ('q' in request.GET) and request.GET['q'].strip():
query_string = request.GET['q']
entry_query = get_query(query_string, ['title', 'body',])
found_entries = Entry.objects.filter(entry_query).order_by('-pub_date')
return render_to_response('search/search_results.html',
{ 'query_string': query_string, 'found_entries': found_entries },
context_instance=RequestContext(request))
'''
|
lgpl-3.0
|
jkyeung/XlsxWriter
|
xlsxwriter/test/comparison/test_autofilter07.py
|
1
|
2715
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'autofilter07.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.txt_filename = test_dir + 'xlsx_files/' + 'autofilter_data.txt'
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""
Test the creation of a simple XlsxWriter file with an autofilter.
Test autofilters where column filter ids are relative to autofilter
range.
"""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
# Set the autofilter.
worksheet.autofilter('D3:G53')
# Add filter criteria.
worksheet.filter_column('D', 'region == East')
# Open a text file with autofilter example data.
textfile = open(self.txt_filename)
# Read the headers from the first line of the input file.
headers = textfile.readline().strip("\n").split()
# Write out the headers.
worksheet.write_row('D3', headers)
# Start writing data after the headers.
row = 3
# Read the rest of the text file and write it to the worksheet.
for line in textfile:
# Split the input data based on whitespace.
data = line.strip("\n").split()
# Convert the number data from the text file.
for i, item in enumerate(data):
try:
data[i] = float(item)
except ValueError:
pass
# Get some of the field data.
region = data[0]
# Check for rows that match the filter.
if region == 'East':
# Row matches the filter, no further action required.
pass
else:
# We need to hide rows that don't match the filter.
worksheet.set_row(row, options={'hidden': True})
# Write out the row data.
worksheet.write_row(row, 3, data)
# Move on to the next worksheet row.
row += 1
textfile.close()
workbook.close()
self.assertExcelEqual()
|
bsd-2-clause
|
AICP/external_chromium_org
|
tools/telemetry/telemetry/timeline/counter.py
|
8
|
2400
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import telemetry.timeline.event_container as event_container
# Doesn't inherit from TimelineEvent because it's only a temporary wrapper of a
# counter sample into an event. During stable operation, the samples are stored
# in a dense array of values rather than in the long form used by an Event.
class CounterSample(object):
def __init__(self, counter, sample_index):
self._counter = counter
self._sample_index = sample_index
@property
def name(self):
return None
@property
def start(self):
return self._counter.timestamps[self._sample_index]
@start.setter
def start(self, start):
self._counter.timestamps[self._sample_index] = start
@property
def duration(self):
return 0
@property
def end(self):
return self.start
@property
def thread_start(self):
return None
@property
def thread_duration(self):
return None
@property
def thread_end(self):
return None
class Counter(event_container.TimelineEventContainer):
""" Stores all the samples for a given counter.
"""
def __init__(self, parent, category, name):
super(Counter, self).__init__(name, parent)
self.category = category
self.full_name = category + '.' + name
self.samples = []
self.timestamps = []
self.series_names = []
self.totals = []
self.max_total = 0
def IterChildContainers(self):
return iter([])
def IterEventsInThisContainer(self):
for i in range(len(self.timestamps)):
yield CounterSample(self, i)
@property
def num_series(self):
return len(self.series_names)
@property
def num_samples(self):
return len(self.timestamps)
def FinalizeImport(self):
if self.num_series * self.num_samples != len(self.samples):
raise ValueError(
'Length of samples must be a multiple of length of timestamps.')
self.totals = []
self.max_total = 0
if not len(self.samples):
return
max_total = None
for i in xrange(self.num_samples):
total = 0
for j in xrange(self.num_series):
total += self.samples[i * self.num_series + j]
self.totals.append(total)
if max_total is None or total > max_total:
max_total = total
self.max_total = max_total
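# A small construction sketch (illustrative names; ``parent`` would normally be
# a timeline container). With two series and two samples, ``samples`` stores
# values interleaved per timestamp:
#
#   counter = Counter(parent, 'mem', 'usage')
#   counter.series_names = ['allocated', 'resident']
#   counter.timestamps = [0.0, 1.0]
#   counter.samples = [10, 12, 11, 13]  # [t0_s0, t0_s1, t1_s0, t1_s1]
#   counter.FinalizeImport()            # totals == [22, 24], max_total == 24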
|
bsd-3-clause
|
popazerty/beyonwiz-4.1
|
lib/python/Components/Renderer/PositionGauge.py
|
132
|
1892
|
from Renderer import Renderer
from enigma import ePositionGauge
class PositionGauge(Renderer):
def __init__(self):
Renderer.__init__(self)
self.__position = 0
self.__seek_position = 0
self.__length = 0
self.__seek_enable = 0
self.__cutlist = [ ]
GUI_WIDGET = ePositionGauge
def postWidgetCreate(self, instance):
self.changed((self.CHANGED_DEFAULT,))
self.cutlist_changed()
instance.setInOutList(self.__cutlist)
def changed(self, what):
if what[0] == self.CHANGED_CLEAR:
            (self.length, self.position) = (0, 0)
else:
(self.length, self.position) = (self.source.length or 0, self.source.position or 0)
def cutlist_changed(self):
self.cutlist = self.source.cutlist or [ ]
def getPosition(self):
return self.__position
def setPosition(self, pos):
self.__position = pos
if self.instance is not None:
self.instance.setPosition(pos)
position = property(getPosition, setPosition)
def getLength(self):
return self.__length
    def setLength(self, length):
        self.__length = length
        if self.instance is not None:
            self.instance.setLength(length)
length = property(getLength, setLength)
def getCutlist(self):
return self.__cutlist
def setCutlist(self, cutlist):
if self.__cutlist != cutlist:
self.__cutlist = cutlist
if self.instance is not None:
self.instance.setInOutList(cutlist)
cutlist = property(getCutlist, setCutlist)
def getSeekEnable(self):
return self.__seek_enable
def setSeekEnable(self, val):
self.__seek_enable = val
if self.instance is not None:
self.instance.enableSeekPointer(val)
seek_pointer_enabled = property(getSeekEnable, setSeekEnable)
def getSeekPosition(self):
return self.__seek_position
def setSeekPosition(self, pos):
self.__seek_position = pos
if self.instance is not None:
self.instance.setSeekPosition(pos)
seek_pointer_position = property(getSeekPosition, setSeekPosition)
|
gpl-2.0
|
krafczyk/spack
|
var/spack/repos/builtin/packages/r-prodlim/package.py
|
5
|
2024
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RProdlim(RPackage):
"""Product-Limit Estimation for Censored Event History Analysis. Fast and
user friendly implementation of nonparametric estimators for censored event
history (survival) analysis. Kaplan-Meier and Aalen-Johansen method."""
homepage = "https://cran.r-project.org/package=prodlim"
url = "https://cran.r-project.org/src/contrib/prodlim_1.5.9.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/prodlim"
version('1.5.9', 'e0843053c9270e41b657a733d6675dc9')
depends_on('[email protected]:')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('r-survival', type=('build', 'run'))
depends_on('r-kernsmooth', type=('build', 'run'))
depends_on('r-lava', type=('build', 'run'))
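# Usage sketch with the Spack CLI (the version pin is illustrative):
#
#   spack install [email protected]
#   spack info r-prodlim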
|
lgpl-2.1
|
protagora/device_network_protocol
|
middleware/config/config.py
|
1
|
8717
|
import os
from dna.middleware.util.format import ConfigurationFormatFactory
class ConfigurationException(Exception):
pass
class Configuration(object):
# file write formats: json, yaml, xml...
FORMAT_JSON = ConfigurationFormatFactory.FORMAT_JSON
ERROR_READING_CONFIG = "Error reading configuration"
ERROR_WRITING_CONFIG = "Error writing configuration"
ERROR_NOT_EXISTS = "Configuration path doesn't exist"
ERROR_NOT_FILE = "Configuration path is not a file"
ERROR_NOT_WRITABLE = "Configuration path not writable"
ERROR_SAVING = "Error saving configuration"
ERROR_KEY = "Path not traversable, key not found: {}"
ERROR_NOT_FOUND = "Not found"
ERROR_SETTING_KEY = "Setting key '{}' to value '{}' failed"
ERROR_INITIALIZING_CONFIG = "Error initializing configuration file"
def read(self):
pass
def write(self, configuration=None):
pass
class JsonConfiguration(Configuration):
DEFAULT_FORMAT = ConfigurationFormatFactory.FORMAT_JSON
DEFAULT_CONFIG = "{}"
def __init__(self, path, config_format=None):
self.path = path
if config_format:
self.format = config_format
else:
self.format = self.DEFAULT_FORMAT
self.configuration_factory = ConfigurationFormatFactory()
self.codec = self.configuration_factory.create(config_format=self.format)
self.__configuration = None
self._test()
@property
def configuration(self):
if self.__configuration is None:
self.read()
return self.__configuration
@configuration.setter
def configuration(self, configuration=None):
self.__configuration = configuration
def save(self):
try:
self.write()
except:
raise ConfigurationException(self.ERROR_SAVING)
def _test(self):
try:
assert os.path.exists(self.path)
except:
raise ConfigurationException(self.ERROR_NOT_EXISTS)
try:
assert os.path.isfile(self.path)
except:
raise ConfigurationException(self.ERROR_NOT_FILE)
try:
with open(self.path, 'r') as handle:
data = handle.read()
if not data:
with open(self.path, 'w') as handle:
handle.write(self.DEFAULT_CONFIG)
except:
raise ConfigurationException(self.ERROR_INITIALIZING_CONFIG)
def get(self, key=None, path=None):
data = self.configuration
if path is not None:
for item in path:
try:
data = data[item]
except:
raise ConfigurationException(self.ERROR_KEY.format(item))
try:
return data[key]
except:
raise ConfigurationException(self.ERROR_NOT_FOUND)
def set(self, key=None, value=None, path=None):
data = self.configuration
if path is not None:
for item in path:
try:
if isinstance(data, dict):
if item not in data.keys():
data[item] = dict()
elif isinstance(data, list):
if isinstance(item, int) and item >= 0:
if item > len(data) - 1:
for index in range(len(data), item + 1):
data.append(dict())
data = data[item]
except:
raise ConfigurationException(self.ERROR_KEY.format(item))
try:
data[key] = value
except:
raise ConfigurationException(self.ERROR_SETTING_KEY.format(key, value))
def read(self):
try:
with open(self.path, 'r') as handle:
self.configuration = self.codec.decode(handle.read())
except:
raise ConfigurationException(self.ERROR_READING_CONFIG)
return self.configuration
def write(self, configuration=None):
try:
assert configuration is not None
self.configuration = configuration
except (BaseException, ):
pass
try:
with open(self.path, 'w') as handle:
handle.write(self.codec.encode(obj=self.configuration))
except (BaseException, ):
raise ConfigurationException(self.ERROR_WRITING_CONFIG)
class EntityConfiguration(object):
def __init__(self, configuration_file=None):
try:
assert os.path.exists(configuration_file)
assert os.path.isfile(configuration_file)
except (BaseException, ):
raise ConfigurationException("Configuration file not found")
self.configuration_file = configuration_file
try:
with open(self.configuration_file, 'r') as handle:
self.configuration = handle.read()
except (BaseException, ):
raise ConfigurationException("Error reading configuration file: {}".format(self.configuration_file))
def get(self, key=None, path=None):
_configuration = self.configuration
for step in path:
try:
_configuration = _configuration[step]
except (BaseException, ):
return None
try:
return _configuration[key]
except (BaseException, ):
return None
def set(self, key=None, value=None, path=None):
_configuration = self.configuration
for step in path:
if isinstance(_configuration, dict):
try:
step = str(step)
except (BaseException, ):
ConfigurationException("Path may only contain strings and integers, {} encountered".format(type(step)))
if step not in _configuration.keys():
_configuration[step] = dict()
else:
if isinstance(_configuration, list):
try:
step = int(step)
except (BaseException, ):
ConfigurationException("Path may only contain strings and integers, {} encountered".format(type(step)))
if step > len(_configuration):
while len(_configuration) - 1 < step:
_configuration.append(dict())
_configuration[step] = dict()
else:
return False
_configuration[key] = value
return True
def save(self):
try:
with open(self.configuration_file, 'w') as handle:
handle.write(self.configuration)
except (BaseException, ):
ConfigurationException("Error saving configuration file")
class ServiceConfiguration(EntityConfiguration):
COMPONENT = ['id', 'status', 'name', 'description', 'resource_table', 'init_function', 'config']
RESOURCE = ['id', 'status', 'name', 'description', 'read_function', 'write_function', 'init_function', 'config']
def get_component(self, _id=None):
pass
def add_component(self, data=None):
pass
def get_resource(self, _id=None):
pass
def add_resource(self, data=None):
pass
def get(self, entity=None, _id=None):
pass
def set(self, entity=None, _id=None):
pass
def save(self):
pass
class ComponentConfiguration(EntityConfiguration):
pass
class ResourceConfiguration(EntityConfiguration):
pass
# # # TEST # # #
if "__main__" == __name__:
# from dna.settings import *
# _path = "{}{}".format(BASE_PATH, '/config/device.json')
# print(_path)
# simple_config = JsonConfiguration(path=_path)
# __configuration = simple_config.read()
#
# # explicit by write
# __configuration["version"] = "1.1.7"
# simple_config.write(configuration=__configuration)
#
# print(__configuration)
#
# # implicit by save
# try:
# print("configuration value (pre): " + simple_config.get(key="key", path=["path", "test", 0]))
# except (BaseException, ):
# pass
# simple_config.set(key="key", value="new value", path=["path", "test", 0])
# print("configuration value (post): " + simple_config.get(key="key", path=["path", "test", 0]))
# simple_config.set(key="key1", value="new value 1", path=["path 3", "test 4", 7])
# simple_config.set(key="key", value="new value 1", path=["path", "test", 7])
# print("new: " + simple_config.get(key="key1", path=["path 3", "test 4", 7]))
# simple_config.save()
pass
|
mit
|
goldeneye-source/ges-python
|
lib/msilib/schema.py
|
90
|
81583
|
from . import Table
_Validation = Table('_Validation')
_Validation.add_field(1,'Table',11552)
_Validation.add_field(2,'Column',11552)
_Validation.add_field(3,'Nullable',3332)
_Validation.add_field(4,'MinValue',4356)
_Validation.add_field(5,'MaxValue',4356)
_Validation.add_field(6,'KeyTable',7679)
_Validation.add_field(7,'KeyColumn',5378)
_Validation.add_field(8,'Category',7456)
_Validation.add_field(9,'Set',7679)
_Validation.add_field(10,'Description',7679)
ActionText = Table('ActionText')
ActionText.add_field(1,'Action',11592)
ActionText.add_field(2,'Description',7936)
ActionText.add_field(3,'Template',7936)
AdminExecuteSequence = Table('AdminExecuteSequence')
AdminExecuteSequence.add_field(1,'Action',11592)
AdminExecuteSequence.add_field(2,'Condition',7679)
AdminExecuteSequence.add_field(3,'Sequence',5378)
Condition = Table('Condition')
Condition.add_field(1,'Feature_',11558)
Condition.add_field(2,'Level',9474)
Condition.add_field(3,'Condition',7679)
AdminUISequence = Table('AdminUISequence')
AdminUISequence.add_field(1,'Action',11592)
AdminUISequence.add_field(2,'Condition',7679)
AdminUISequence.add_field(3,'Sequence',5378)
AdvtExecuteSequence = Table('AdvtExecuteSequence')
AdvtExecuteSequence.add_field(1,'Action',11592)
AdvtExecuteSequence.add_field(2,'Condition',7679)
AdvtExecuteSequence.add_field(3,'Sequence',5378)
AdvtUISequence = Table('AdvtUISequence')
AdvtUISequence.add_field(1,'Action',11592)
AdvtUISequence.add_field(2,'Condition',7679)
AdvtUISequence.add_field(3,'Sequence',5378)
AppId = Table('AppId')
AppId.add_field(1,'AppId',11558)
AppId.add_field(2,'RemoteServerName',7679)
AppId.add_field(3,'LocalService',7679)
AppId.add_field(4,'ServiceParameters',7679)
AppId.add_field(5,'DllSurrogate',7679)
AppId.add_field(6,'ActivateAtStorage',5378)
AppId.add_field(7,'RunAsInteractiveUser',5378)
AppSearch = Table('AppSearch')
AppSearch.add_field(1,'Property',11592)
AppSearch.add_field(2,'Signature_',11592)
Property = Table('Property')
Property.add_field(1,'Property',11592)
Property.add_field(2,'Value',3840)
BBControl = Table('BBControl')
BBControl.add_field(1,'Billboard_',11570)
BBControl.add_field(2,'BBControl',11570)
BBControl.add_field(3,'Type',3378)
BBControl.add_field(4,'X',1282)
BBControl.add_field(5,'Y',1282)
BBControl.add_field(6,'Width',1282)
BBControl.add_field(7,'Height',1282)
BBControl.add_field(8,'Attributes',4356)
BBControl.add_field(9,'Text',7986)
Billboard = Table('Billboard')
Billboard.add_field(1,'Billboard',11570)
Billboard.add_field(2,'Feature_',3366)
Billboard.add_field(3,'Action',7474)
Billboard.add_field(4,'Ordering',5378)
Feature = Table('Feature')
Feature.add_field(1,'Feature',11558)
Feature.add_field(2,'Feature_Parent',7462)
Feature.add_field(3,'Title',8000)
Feature.add_field(4,'Description',8191)
Feature.add_field(5,'Display',5378)
Feature.add_field(6,'Level',1282)
Feature.add_field(7,'Directory_',7496)
Feature.add_field(8,'Attributes',1282)
Binary = Table('Binary')
Binary.add_field(1,'Name',11592)
Binary.add_field(2,'Data',2304)
BindImage = Table('BindImage')
BindImage.add_field(1,'File_',11592)
BindImage.add_field(2,'Path',7679)
File = Table('File')
File.add_field(1,'File',11592)
File.add_field(2,'Component_',3400)
File.add_field(3,'FileName',4095)
File.add_field(4,'FileSize',260)
File.add_field(5,'Version',7496)
File.add_field(6,'Language',7444)
File.add_field(7,'Attributes',5378)
File.add_field(8,'Sequence',1282)
CCPSearch = Table('CCPSearch')
CCPSearch.add_field(1,'Signature_',11592)
CheckBox = Table('CheckBox')
CheckBox.add_field(1,'Property',11592)
CheckBox.add_field(2,'Value',7488)
Class = Table('Class')
Class.add_field(1,'CLSID',11558)
Class.add_field(2,'Context',11552)
Class.add_field(3,'Component_',11592)
Class.add_field(4,'ProgId_Default',7679)
Class.add_field(5,'Description',8191)
Class.add_field(6,'AppId_',7462)
Class.add_field(7,'FileTypeMask',7679)
Class.add_field(8,'Icon_',7496)
Class.add_field(9,'IconIndex',5378)
Class.add_field(10,'DefInprocHandler',7456)
Class.add_field(11,'Argument',7679)
Class.add_field(12,'Feature_',3366)
Class.add_field(13,'Attributes',5378)
Component = Table('Component')
Component.add_field(1,'Component',11592)
Component.add_field(2,'ComponentId',7462)
Component.add_field(3,'Directory_',3400)
Component.add_field(4,'Attributes',1282)
Component.add_field(5,'Condition',7679)
Component.add_field(6,'KeyPath',7496)
Icon = Table('Icon')
Icon.add_field(1,'Name',11592)
Icon.add_field(2,'Data',2304)
ProgId = Table('ProgId')
ProgId.add_field(1,'ProgId',11775)
ProgId.add_field(2,'ProgId_Parent',7679)
ProgId.add_field(3,'Class_',7462)
ProgId.add_field(4,'Description',8191)
ProgId.add_field(5,'Icon_',7496)
ProgId.add_field(6,'IconIndex',5378)
ComboBox = Table('ComboBox')
ComboBox.add_field(1,'Property',11592)
ComboBox.add_field(2,'Order',9474)
ComboBox.add_field(3,'Value',3392)
ComboBox.add_field(4,'Text',8000)
CompLocator = Table('CompLocator')
CompLocator.add_field(1,'Signature_',11592)
CompLocator.add_field(2,'ComponentId',3366)
CompLocator.add_field(3,'Type',5378)
Complus = Table('Complus')
Complus.add_field(1,'Component_',11592)
Complus.add_field(2,'ExpType',13570)
Directory = Table('Directory')
Directory.add_field(1,'Directory',11592)
Directory.add_field(2,'Directory_Parent',7496)
Directory.add_field(3,'DefaultDir',4095)
Control = Table('Control')
Control.add_field(1,'Dialog_',11592)
Control.add_field(2,'Control',11570)
Control.add_field(3,'Type',3348)
Control.add_field(4,'X',1282)
Control.add_field(5,'Y',1282)
Control.add_field(6,'Width',1282)
Control.add_field(7,'Height',1282)
Control.add_field(8,'Attributes',4356)
Control.add_field(9,'Property',7474)
Control.add_field(10,'Text',7936)
Control.add_field(11,'Control_Next',7474)
Control.add_field(12,'Help',7986)
Dialog = Table('Dialog')
Dialog.add_field(1,'Dialog',11592)
Dialog.add_field(2,'HCentering',1282)
Dialog.add_field(3,'VCentering',1282)
Dialog.add_field(4,'Width',1282)
Dialog.add_field(5,'Height',1282)
Dialog.add_field(6,'Attributes',4356)
Dialog.add_field(7,'Title',8064)
Dialog.add_field(8,'Control_First',3378)
Dialog.add_field(9,'Control_Default',7474)
Dialog.add_field(10,'Control_Cancel',7474)
ControlCondition = Table('ControlCondition')
ControlCondition.add_field(1,'Dialog_',11592)
ControlCondition.add_field(2,'Control_',11570)
ControlCondition.add_field(3,'Action',11570)
ControlCondition.add_field(4,'Condition',11775)
ControlEvent = Table('ControlEvent')
ControlEvent.add_field(1,'Dialog_',11592)
ControlEvent.add_field(2,'Control_',11570)
ControlEvent.add_field(3,'Event',11570)
ControlEvent.add_field(4,'Argument',11775)
ControlEvent.add_field(5,'Condition',15871)
ControlEvent.add_field(6,'Ordering',5378)
CreateFolder = Table('CreateFolder')
CreateFolder.add_field(1,'Directory_',11592)
CreateFolder.add_field(2,'Component_',11592)
CustomAction = Table('CustomAction')
CustomAction.add_field(1,'Action',11592)
CustomAction.add_field(2,'Type',1282)
CustomAction.add_field(3,'Source',7496)
CustomAction.add_field(4,'Target',7679)
DrLocator = Table('DrLocator')
DrLocator.add_field(1,'Signature_',11592)
DrLocator.add_field(2,'Parent',15688)
DrLocator.add_field(3,'Path',15871)
DrLocator.add_field(4,'Depth',5378)
DuplicateFile = Table('DuplicateFile')
DuplicateFile.add_field(1,'FileKey',11592)
DuplicateFile.add_field(2,'Component_',3400)
DuplicateFile.add_field(3,'File_',3400)
DuplicateFile.add_field(4,'DestName',8191)
DuplicateFile.add_field(5,'DestFolder',7496)
Environment = Table('Environment')
Environment.add_field(1,'Environment',11592)
Environment.add_field(2,'Name',4095)
Environment.add_field(3,'Value',8191)
Environment.add_field(4,'Component_',3400)
Error = Table('Error')
Error.add_field(1,'Error',9474)
Error.add_field(2,'Message',7936)
EventMapping = Table('EventMapping')
EventMapping.add_field(1,'Dialog_',11592)
EventMapping.add_field(2,'Control_',11570)
EventMapping.add_field(3,'Event',11570)
EventMapping.add_field(4,'Attribute',3378)
Extension = Table('Extension')
Extension.add_field(1,'Extension',11775)
Extension.add_field(2,'Component_',11592)
Extension.add_field(3,'ProgId_',7679)
Extension.add_field(4,'MIME_',7488)
Extension.add_field(5,'Feature_',3366)
MIME = Table('MIME')
MIME.add_field(1,'ContentType',11584)
MIME.add_field(2,'Extension_',3583)
MIME.add_field(3,'CLSID',7462)
FeatureComponents = Table('FeatureComponents')
FeatureComponents.add_field(1,'Feature_',11558)
FeatureComponents.add_field(2,'Component_',11592)
FileSFPCatalog = Table('FileSFPCatalog')
FileSFPCatalog.add_field(1,'File_',11592)
FileSFPCatalog.add_field(2,'SFPCatalog_',11775)
SFPCatalog = Table('SFPCatalog')
SFPCatalog.add_field(1,'SFPCatalog',11775)
SFPCatalog.add_field(2,'Catalog',2304)
SFPCatalog.add_field(3,'Dependency',7424)
Font = Table('Font')
Font.add_field(1,'File_',11592)
Font.add_field(2,'FontTitle',7552)
IniFile = Table('IniFile')
IniFile.add_field(1,'IniFile',11592)
IniFile.add_field(2,'FileName',4095)
IniFile.add_field(3,'DirProperty',7496)
IniFile.add_field(4,'Section',3936)
IniFile.add_field(5,'Key',3968)
IniFile.add_field(6,'Value',4095)
IniFile.add_field(7,'Action',1282)
IniFile.add_field(8,'Component_',3400)
IniLocator = Table('IniLocator')
IniLocator.add_field(1,'Signature_',11592)
IniLocator.add_field(2,'FileName',3583)
IniLocator.add_field(3,'Section',3424)
IniLocator.add_field(4,'Key',3456)
IniLocator.add_field(5,'Field',5378)
IniLocator.add_field(6,'Type',5378)
InstallExecuteSequence = Table('InstallExecuteSequence')
InstallExecuteSequence.add_field(1,'Action',11592)
InstallExecuteSequence.add_field(2,'Condition',7679)
InstallExecuteSequence.add_field(3,'Sequence',5378)
InstallUISequence = Table('InstallUISequence')
InstallUISequence.add_field(1,'Action',11592)
InstallUISequence.add_field(2,'Condition',7679)
InstallUISequence.add_field(3,'Sequence',5378)
IsolatedComponent = Table('IsolatedComponent')
IsolatedComponent.add_field(1,'Component_Shared',11592)
IsolatedComponent.add_field(2,'Component_Application',11592)
LaunchCondition = Table('LaunchCondition')
LaunchCondition.add_field(1,'Condition',11775)
LaunchCondition.add_field(2,'Description',4095)
ListBox = Table('ListBox')
ListBox.add_field(1,'Property',11592)
ListBox.add_field(2,'Order',9474)
ListBox.add_field(3,'Value',3392)
ListBox.add_field(4,'Text',8000)
ListView = Table('ListView')
ListView.add_field(1,'Property',11592)
ListView.add_field(2,'Order',9474)
ListView.add_field(3,'Value',3392)
ListView.add_field(4,'Text',8000)
ListView.add_field(5,'Binary_',7496)
LockPermissions = Table('LockPermissions')
LockPermissions.add_field(1,'LockObject',11592)
LockPermissions.add_field(2,'Table',11552)
LockPermissions.add_field(3,'Domain',15871)
LockPermissions.add_field(4,'User',11775)
LockPermissions.add_field(5,'Permission',4356)
Media = Table('Media')
Media.add_field(1,'DiskId',9474)
Media.add_field(2,'LastSequence',1282)
Media.add_field(3,'DiskPrompt',8000)
Media.add_field(4,'Cabinet',7679)
Media.add_field(5,'VolumeLabel',7456)
Media.add_field(6,'Source',7496)
MoveFile = Table('MoveFile')
MoveFile.add_field(1,'FileKey',11592)
MoveFile.add_field(2,'Component_',3400)
MoveFile.add_field(3,'SourceName',8191)
MoveFile.add_field(4,'DestName',8191)
MoveFile.add_field(5,'SourceFolder',7496)
MoveFile.add_field(6,'DestFolder',3400)
MoveFile.add_field(7,'Options',1282)
MsiAssembly = Table('MsiAssembly')
MsiAssembly.add_field(1,'Component_',11592)
MsiAssembly.add_field(2,'Feature_',3366)
MsiAssembly.add_field(3,'File_Manifest',7496)
MsiAssembly.add_field(4,'File_Application',7496)
MsiAssembly.add_field(5,'Attributes',5378)
MsiAssemblyName = Table('MsiAssemblyName')
MsiAssemblyName.add_field(1,'Component_',11592)
MsiAssemblyName.add_field(2,'Name',11775)
MsiAssemblyName.add_field(3,'Value',3583)
MsiDigitalCertificate = Table('MsiDigitalCertificate')
MsiDigitalCertificate.add_field(1,'DigitalCertificate',11592)
MsiDigitalCertificate.add_field(2,'CertData',2304)
MsiDigitalSignature = Table('MsiDigitalSignature')
MsiDigitalSignature.add_field(1,'Table',11552)
MsiDigitalSignature.add_field(2,'SignObject',11592)
MsiDigitalSignature.add_field(3,'DigitalCertificate_',3400)
MsiDigitalSignature.add_field(4,'Hash',6400)
MsiFileHash = Table('MsiFileHash')
MsiFileHash.add_field(1,'File_',11592)
MsiFileHash.add_field(2,'Options',1282)
MsiFileHash.add_field(3,'HashPart1',260)
MsiFileHash.add_field(4,'HashPart2',260)
MsiFileHash.add_field(5,'HashPart3',260)
MsiFileHash.add_field(6,'HashPart4',260)
MsiPatchHeaders = Table('MsiPatchHeaders')
MsiPatchHeaders.add_field(1,'StreamRef',11558)
MsiPatchHeaders.add_field(2,'Header',2304)
ODBCAttribute = Table('ODBCAttribute')
ODBCAttribute.add_field(1,'Driver_',11592)
ODBCAttribute.add_field(2,'Attribute',11560)
ODBCAttribute.add_field(3,'Value',8191)
ODBCDriver = Table('ODBCDriver')
ODBCDriver.add_field(1,'Driver',11592)
ODBCDriver.add_field(2,'Component_',3400)
ODBCDriver.add_field(3,'Description',3583)
ODBCDriver.add_field(4,'File_',3400)
ODBCDriver.add_field(5,'File_Setup',7496)
ODBCDataSource = Table('ODBCDataSource')
ODBCDataSource.add_field(1,'DataSource',11592)
ODBCDataSource.add_field(2,'Component_',3400)
ODBCDataSource.add_field(3,'Description',3583)
ODBCDataSource.add_field(4,'DriverDescription',3583)
ODBCDataSource.add_field(5,'Registration',1282)
ODBCSourceAttribute = Table('ODBCSourceAttribute')
ODBCSourceAttribute.add_field(1,'DataSource_',11592)
ODBCSourceAttribute.add_field(2,'Attribute',11552)
ODBCSourceAttribute.add_field(3,'Value',8191)
ODBCTranslator = Table('ODBCTranslator')
ODBCTranslator.add_field(1,'Translator',11592)
ODBCTranslator.add_field(2,'Component_',3400)
ODBCTranslator.add_field(3,'Description',3583)
ODBCTranslator.add_field(4,'File_',3400)
ODBCTranslator.add_field(5,'File_Setup',7496)
Patch = Table('Patch')
Patch.add_field(1,'File_',11592)
Patch.add_field(2,'Sequence',9474)
Patch.add_field(3,'PatchSize',260)
Patch.add_field(4,'Attributes',1282)
Patch.add_field(5,'Header',6400)
Patch.add_field(6,'StreamRef_',7462)
PatchPackage = Table('PatchPackage')
PatchPackage.add_field(1,'PatchId',11558)
PatchPackage.add_field(2,'Media_',1282)
PublishComponent = Table('PublishComponent')
PublishComponent.add_field(1,'ComponentId',11558)
PublishComponent.add_field(2,'Qualifier',11775)
PublishComponent.add_field(3,'Component_',11592)
PublishComponent.add_field(4,'AppData',8191)
PublishComponent.add_field(5,'Feature_',3366)
RadioButton = Table('RadioButton')
RadioButton.add_field(1,'Property',11592)
RadioButton.add_field(2,'Order',9474)
RadioButton.add_field(3,'Value',3392)
RadioButton.add_field(4,'X',1282)
RadioButton.add_field(5,'Y',1282)
RadioButton.add_field(6,'Width',1282)
RadioButton.add_field(7,'Height',1282)
RadioButton.add_field(8,'Text',8000)
RadioButton.add_field(9,'Help',7986)
Registry = Table('Registry')
Registry.add_field(1,'Registry',11592)
Registry.add_field(2,'Root',1282)
Registry.add_field(3,'Key',4095)
Registry.add_field(4,'Name',8191)
Registry.add_field(5,'Value',7936)
Registry.add_field(6,'Component_',3400)
RegLocator = Table('RegLocator')
RegLocator.add_field(1,'Signature_',11592)
RegLocator.add_field(2,'Root',1282)
RegLocator.add_field(3,'Key',3583)
RegLocator.add_field(4,'Name',7679)
RegLocator.add_field(5,'Type',5378)
RemoveFile = Table('RemoveFile')
RemoveFile.add_field(1,'FileKey',11592)
RemoveFile.add_field(2,'Component_',3400)
RemoveFile.add_field(3,'FileName',8191)
RemoveFile.add_field(4,'DirProperty',3400)
RemoveFile.add_field(5,'InstallMode',1282)
RemoveIniFile = Table('RemoveIniFile')
RemoveIniFile.add_field(1,'RemoveIniFile',11592)
RemoveIniFile.add_field(2,'FileName',4095)
RemoveIniFile.add_field(3,'DirProperty',7496)
RemoveIniFile.add_field(4,'Section',3936)
RemoveIniFile.add_field(5,'Key',3968)
RemoveIniFile.add_field(6,'Value',8191)
RemoveIniFile.add_field(7,'Action',1282)
RemoveIniFile.add_field(8,'Component_',3400)
RemoveRegistry = Table('RemoveRegistry')
RemoveRegistry.add_field(1,'RemoveRegistry',11592)
RemoveRegistry.add_field(2,'Root',1282)
RemoveRegistry.add_field(3,'Key',4095)
RemoveRegistry.add_field(4,'Name',8191)
RemoveRegistry.add_field(5,'Component_',3400)
ReserveCost = Table('ReserveCost')
ReserveCost.add_field(1,'ReserveKey',11592)
ReserveCost.add_field(2,'Component_',3400)
ReserveCost.add_field(3,'ReserveFolder',7496)
ReserveCost.add_field(4,'ReserveLocal',260)
ReserveCost.add_field(5,'ReserveSource',260)
SelfReg = Table('SelfReg')
SelfReg.add_field(1,'File_',11592)
SelfReg.add_field(2,'Cost',5378)
ServiceControl = Table('ServiceControl')
ServiceControl.add_field(1,'ServiceControl',11592)
ServiceControl.add_field(2,'Name',4095)
ServiceControl.add_field(3,'Event',1282)
ServiceControl.add_field(4,'Arguments',8191)
ServiceControl.add_field(5,'Wait',5378)
ServiceControl.add_field(6,'Component_',3400)
ServiceInstall = Table('ServiceInstall')
ServiceInstall.add_field(1,'ServiceInstall',11592)
ServiceInstall.add_field(2,'Name',3583)
ServiceInstall.add_field(3,'DisplayName',8191)
ServiceInstall.add_field(4,'ServiceType',260)
ServiceInstall.add_field(5,'StartType',260)
ServiceInstall.add_field(6,'ErrorControl',260)
ServiceInstall.add_field(7,'LoadOrderGroup',7679)
ServiceInstall.add_field(8,'Dependencies',7679)
ServiceInstall.add_field(9,'StartName',7679)
ServiceInstall.add_field(10,'Password',7679)
ServiceInstall.add_field(11,'Arguments',7679)
ServiceInstall.add_field(12,'Component_',3400)
ServiceInstall.add_field(13,'Description',8191)
Shortcut = Table('Shortcut')
Shortcut.add_field(1,'Shortcut',11592)
Shortcut.add_field(2,'Directory_',3400)
Shortcut.add_field(3,'Name',3968)
Shortcut.add_field(4,'Component_',3400)
Shortcut.add_field(5,'Target',3400)
Shortcut.add_field(6,'Arguments',7679)
Shortcut.add_field(7,'Description',8191)
Shortcut.add_field(8,'Hotkey',5378)
Shortcut.add_field(9,'Icon_',7496)
Shortcut.add_field(10,'IconIndex',5378)
Shortcut.add_field(11,'ShowCmd',5378)
Shortcut.add_field(12,'WkDir',7496)
Signature = Table('Signature')
Signature.add_field(1,'Signature',11592)
Signature.add_field(2,'FileName',3583)
Signature.add_field(3,'MinVersion',7444)
Signature.add_field(4,'MaxVersion',7444)
Signature.add_field(5,'MinSize',4356)
Signature.add_field(6,'MaxSize',4356)
Signature.add_field(7,'MinDate',4356)
Signature.add_field(8,'MaxDate',4356)
Signature.add_field(9,'Languages',7679)
TextStyle = Table('TextStyle')
TextStyle.add_field(1,'TextStyle',11592)
TextStyle.add_field(2,'FaceName',3360)
TextStyle.add_field(3,'Size',1282)
TextStyle.add_field(4,'Color',4356)
TextStyle.add_field(5,'StyleBits',5378)
TypeLib = Table('TypeLib')
TypeLib.add_field(1,'LibID',11558)
TypeLib.add_field(2,'Language',9474)
TypeLib.add_field(3,'Component_',11592)
TypeLib.add_field(4,'Version',4356)
TypeLib.add_field(5,'Description',8064)
TypeLib.add_field(6,'Directory_',7496)
TypeLib.add_field(7,'Feature_',3366)
TypeLib.add_field(8,'Cost',4356)
UIText = Table('UIText')
UIText.add_field(1,'Key',11592)
UIText.add_field(2,'Text',8191)
Upgrade = Table('Upgrade')
Upgrade.add_field(1,'UpgradeCode',11558)
Upgrade.add_field(2,'VersionMin',15636)
Upgrade.add_field(3,'VersionMax',15636)
Upgrade.add_field(4,'Language',15871)
Upgrade.add_field(5,'Attributes',8452)
Upgrade.add_field(6,'Remove',7679)
Upgrade.add_field(7,'ActionProperty',3400)
Verb = Table('Verb')
Verb.add_field(1,'Extension_',11775)
Verb.add_field(2,'Verb',11552)
Verb.add_field(3,'Sequence',5378)
Verb.add_field(4,'Command',8191)
Verb.add_field(5,'Argument',8191)
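# All Table objects defined in this module, collected for bulk processing.
# The merge-module tables (ModuleComponents, ModuleSignature,
# ModuleDependency, ModuleExclusion) have validation records below but no
# Table object here.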
tables = [
    _Validation, ActionText, AdminExecuteSequence, Condition,
    AdminUISequence, AdvtExecuteSequence, AdvtUISequence, AppId, AppSearch,
    Property, BBControl, Billboard, Feature, Binary, BindImage, File,
    CCPSearch, CheckBox, Class, Component, Icon, ProgId, ComboBox,
    CompLocator, Complus, Directory, Control, Dialog, ControlCondition,
    ControlEvent, CreateFolder, CustomAction, DrLocator, DuplicateFile,
    Environment, Error, EventMapping, Extension, MIME, FeatureComponents,
    FileSFPCatalog, SFPCatalog, Font, IniFile, IniLocator,
    InstallExecuteSequence, InstallUISequence, IsolatedComponent,
    LaunchCondition, ListBox, ListView, LockPermissions, Media, MoveFile,
    MsiAssembly, MsiAssemblyName, MsiDigitalCertificate,
    MsiDigitalSignature, MsiFileHash, MsiPatchHeaders, ODBCAttribute,
    ODBCDriver, ODBCDataSource, ODBCSourceAttribute, ODBCTranslator,
    Patch, PatchPackage, PublishComponent, RadioButton, Registry,
    RegLocator, RemoveFile, RemoveIniFile, RemoveRegistry, ReserveCost,
    SelfReg, ServiceControl, ServiceInstall, Shortcut, Signature,
    TextStyle, TypeLib, UIText, Upgrade, Verb]
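# Each tuple in _Validation_records mirrors one row of the _Validation
# table:
#   (Table, Column, Nullable, MinValue, MaxValue, KeyTable, KeyColumn,
#    Category, Set, Description)
#
# A minimal usage sketch, assuming this module plays the same role as
# msilib's bundled schema module (msilib is Windows-only; the file name and
# product details below are hypothetical):
#
#   import msilib
#   db = msilib.init_database('example.msi', msilib.schema,
#                             'ExampleProduct', msilib.gen_uuid(),
#                             '1.0.0', 'Example Co.')
#   msilib.add_tables(db, msilib.sequence)
#   db.Commit()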
_Validation_records = [
('_Validation','Table','N',None, None, None, None, 'Identifier',None, 'Name of table',),
('_Validation','Column','N',None, None, None, None, 'Identifier',None, 'Name of column',),
('_Validation','Description','Y',None, None, None, None, 'Text',None, 'Description of column',),
('_Validation','Set','Y',None, None, None, None, 'Text',None, 'Set of values that are permitted',),
('_Validation','Category','Y',None, None, None, None, None, 'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL','String category',),
('_Validation','KeyColumn','Y',1,32,None, None, None, None, 'Column to which foreign key connects',),
('_Validation','KeyTable','Y',None, None, None, None, 'Identifier',None, 'For foreign key, Name of table to which data must link',),
('_Validation','MaxValue','Y',-2147483647,2147483647,None, None, None, None, 'Maximum value allowed',),
('_Validation','MinValue','Y',-2147483647,2147483647,None, None, None, None, 'Minimum value allowed',),
('_Validation','Nullable','N',None, None, None, None, None, 'Y;N;@','Whether the column is nullable',),
('ActionText','Description','Y',None, None, None, None, 'Text',None, 'Localized description displayed in progress dialog and log when action is executing.',),
('ActionText','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to be described.',),
('ActionText','Template','Y',None, None, None, None, 'Template',None, 'Optional localized format template used to format action data records for display during action execution.',),
('AdminExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('Condition','Condition','Y',None, None, None, None, 'Condition',None, 'Expression evaluated to determine if Level in the Feature table is to change.',),
('Condition','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Reference to a Feature entry in Feature table.',),
('Condition','Level','N',0,32767,None, None, None, None, 'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
('AdminUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AppId','AppId','N',None, None, None, None, 'Guid',None, None, ),
('AppId','ActivateAtStorage','Y',0,1,None, None, None, None, None, ),
('AppId','DllSurrogate','Y',None, None, None, None, 'Text',None, None, ),
('AppId','LocalService','Y',None, None, None, None, 'Text',None, None, ),
('AppId','RemoteServerName','Y',None, None, None, None, 'Formatted',None, None, ),
('AppId','RunAsInteractiveUser','Y',0,1,None, None, None, None, None, ),
('AppId','ServiceParameters','Y',None, None, None, None, 'Text',None, None, ),
('AppSearch','Property','N',None, None, None, None, 'Identifier',None, 'The property associated with a Signature',),
('AppSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('Property','Property','N',None, None, None, None, 'Identifier',None, 'Name of property, uppercase if settable by launcher or loader.',),
('Property','Value','N',None, None, None, None, 'Text',None, 'String value for property. Never null or empty.',),
('BBControl','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('BBControl','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('BBControl','Text','Y',None, None, None, None, 'Text',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('BBControl','BBControl','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a billboard, but can repeat on different billboards.',),
('BBControl','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('BBControl','Billboard_','N',None, None, 'Billboard',1,'Identifier',None, 'External key to the Billboard table, name of the billboard.',),
('BBControl','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('BBControl','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('BBControl','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Billboard','Action','Y',None, None, None, None, 'Identifier',None, 'The name of an action. The billboard is displayed during the progress messages received from this action.',),
('Billboard','Billboard','N',None, None, None, None, 'Identifier',None, 'Name of the billboard.',),
('Billboard','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
('Billboard','Ordering','Y',0,32767,None, None, None, None, 'A positive integer. If there is more than one billboard corresponding to an action, they will be shown in the order defined by this column.',),
('Feature','Description','Y',None, None, None, None, 'Text',None, 'Longer descriptive text describing a visible feature item.',),
('Feature','Attributes','N',None, None, None, None, None, '0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54','Feature attributes',),
('Feature','Feature','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular feature record.',),
('Feature','Directory_','Y',None, None, 'Directory',1,'UpperCase',None, 'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
('Feature','Level','N',0,32767,None, None, None, None, 'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
('Feature','Title','Y',None, None, None, None, 'Text',None, 'Short text identifying a visible feature item.',),
('Feature','Display','Y',0,32767,None, None, None, None, 'Numeric sort order, used to force a specific display ordering.',),
('Feature','Feature_Parent','Y',None, None, 'Feature',1,'Identifier',None, 'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
('Binary','Name','N',None, None, None, None, 'Identifier',None, 'Unique key identifying the binary data.',),
('Binary','Data','N',None, None, None, None, 'Binary',None, 'The unformatted binary data.',),
('BindImage','File_','N',None, None, 'File',1,'Identifier',None, 'The index into the File table. This must be an executable file.',),
('BindImage','Path','Y',None, None, None, None, 'Paths',None, 'A list of ;-delimited paths that represent the paths to be searched for the import DLLs. The list is usually a list of properties, each enclosed within square brackets [].',),
('File','Sequence','N',1,32767,None, None, None, None, 'Sequence with respect to the media images; order must track cabinet order.',),
('File','Attributes','Y',0,32767,None, None, None, None, 'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
('File','File','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
('File','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file.',),
('File','FileName','N',None, None, None, None, 'Filename',None, 'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
('File','FileSize','N',0,2147483647,None, None, None, None, 'Size of file in bytes (integer).',),
('File','Language','Y',None, None, None, None, 'Language',None, 'List of decimal language Ids, comma-separated if more than one.',),
('File','Version','Y',None, None, 'File',1,'Version',None, 'Version string for versioned files; Blank for unversioned files.',),
('CCPSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('CheckBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to the item.',),
('CheckBox','Value','Y',None, None, None, None, 'Formatted',None, 'The value string associated with the item.',),
('Class','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Class.',),
('Class','Attributes','Y',None, 32767,None, None, None, None, 'Class registration attributes.',),
('Class','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Class','AppId_','Y',None, None, 'AppId',1,'Guid',None, 'Optional AppID containing DCOM information for associated application (string GUID).',),
('Class','Argument','Y',None, None, None, None, 'Formatted',None, 'Optional argument for LocalServers.',),
('Class','CLSID','N',None, None, None, None, 'Guid',None, 'The CLSID of an OLE factory.',),
('Class','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Class','Context','N',None, None, None, None, 'Identifier',None, 'The numeric server context for this server. CLSCTX_xxxx',),
('Class','DefInprocHandler','Y',None, None, None, None, 'Filename','1;2;3','Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCR (this CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
('Class','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
('Class','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('Class','ProgId_Default','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this CLSID.',),
('Component','Condition','Y',None, None, None, None, 'Condition',None, "A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
('Component','Attributes','N',None, None, None, None, None, None, 'Remote execution option, one of irsEnum',),
('Component','Component','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular component record.',),
('Component','ComponentId','Y',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Component','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
('Component','KeyPath','Y',None, None, 'File;Registry;ODBCDataSource',1,'Identifier',None, 'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
('Icon','Name','N',None, None, None, None, 'Identifier',None, 'Primary key. Name of the icon file.',),
('Icon','Data','N',None, None, None, None, 'Binary',None, 'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
('ProgId','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Program identifier.',),
('ProgId','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
('ProgId','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('ProgId','ProgId','N',None, None, None, None, 'Text',None, 'The Program Identifier. Primary key.',),
('ProgId','Class_','Y',None, None, 'Class',1,'Guid',None, 'The CLSID of an OLE factory corresponding to the ProgId.',),
('ProgId','ProgId_Parent','Y',None, None, 'ProgId',1,'Text',None, 'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
('ComboBox','Text','Y',None, None, None, None, 'Formatted',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ComboBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
('ComboBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ComboBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
('CompLocator','Type','Y',0,1,None, None, None, None, 'A boolean value that determines if the registry value is a filename or a directory location.',),
('CompLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('CompLocator','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Complus','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the ComPlus component.',),
('Complus','ExpType','Y',0,32767,None, None, None, None, 'ComPlus component attributes.',),
('Directory','Directory','N',None, None, None, None, 'Identifier',None, 'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
('Directory','DefaultDir','N',None, None, None, None, 'DefaultDir',None, "The default sub-path under parent's path.",),
('Directory','Directory_Parent','Y',None, None, 'Directory',1,'Identifier',None, 'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
('Control','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('Control','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Text','Y',None, None, None, None, 'Formatted',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('Control','Property','Y',None, None, None, None, 'Identifier',None, 'The name of a defined property to be linked to this control. ',),
('Control','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('Control','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('Control','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('Control','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Control','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
('Control','Control_Next','Y',None, None, 'Control',2,'Identifier',None, 'The name of another control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
('Control','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'External key to the Dialog table, name of the dialog.',),
('Control','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional. ',),
('Dialog','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
('Dialog','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the dialog.',),
('Dialog','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the dialog.',),
('Dialog','Dialog','N',None, None, None, None, 'Identifier',None, 'Name of the dialog.',),
('Dialog','Control_Cancel','Y',None, None, 'Control',2,'Identifier',None, 'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
('Dialog','Control_Default','Y',None, None, 'Control',2,'Identifier',None, 'Defines the default control. Hitting return is equivalent to pushing this button.',),
('Dialog','Control_First','N',None, None, 'Control',2,'Identifier',None, 'Defines the control that has the focus when the dialog is created.',),
('Dialog','HCentering','N',0,100,None, None, None, None, 'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
('Dialog','Title','Y',None, None, None, None, 'Formatted',None, "A text string specifying the title to be displayed in the title bar of the dialog's window.",),
('Dialog','VCentering','N',0,100,None, None, None, None, 'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
('ControlCondition','Action','N',None, None, None, None, None, 'Default;Disable;Enable;Hide;Show','The desired action to be taken on the specified control.',),
('ControlCondition','Condition','N',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions the action should be triggered.',),
('ControlCondition','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlCondition','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('ControlEvent','Condition','Y',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions an event should be triggered.',),
('ControlEvent','Ordering','Y',0,2147483647,None, None, None, None, 'An integer used to order several events tied to the same control. Can be left blank.',),
('ControlEvent','Argument','N',None, None, None, None, 'Formatted',None, 'A value to be used as a modifier when triggering a particular event.',),
('ControlEvent','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlEvent','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control',),
('ControlEvent','Event','N',None, None, None, None, 'Formatted',None, 'An identifier that specifies the type of the event that should take place when the user interacts with the control specified by the first two entries.',),
('CreateFolder','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('CreateFolder','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Primary key, could be foreign key into the Directory table.',),
('CustomAction','Type','N',1,16383,None, None, None, None, 'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
('CustomAction','Action','N',None, None, None, None, 'Identifier',None, 'Primary key, name of action, normally appears in sequence table unless private use.',),
('CustomAction','Source','Y',None, None, None, None, 'CustomSource',None, 'The table reference of the source of the code.',),
('CustomAction','Target','Y',None, None, None, None, 'Formatted',None, 'Execution parameter; depends on the type of custom action',),
('DrLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('DrLocator','Path','Y',None, None, None, None, 'AnyPath',None, 'The path on the user system. This is either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
('DrLocator','Depth','Y',0,32767,None, None, None, None, 'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
('DrLocator','Parent','Y',None, None, None, None, 'Identifier',None, 'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
('DuplicateFile','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key referencing the source file to be duplicated.',),
('DuplicateFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the duplicate file.',),
('DuplicateFile','DestFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
('DuplicateFile','DestName','Y',None, None, None, None, 'Filename',None, 'Filename to be given to the duplicate file.',),
('DuplicateFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('Environment','Name','N',None, None, None, None, 'Text',None, 'The name of the environmental value.',),
('Environment','Value','Y',None, None, None, None, 'Formatted',None, 'The value to set in the environmental settings.',),
('Environment','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
('Environment','Environment','N',None, None, None, None, 'Identifier',None, 'Unique identifier for the environmental variable setting',),
('Error','Error','N',0,32767,None, None, None, None, 'Integer error number, obtained from header file IError(...) macros.',),
('Error','Message','Y',None, None, None, None, 'Template',None, 'Error formatting template, obtained from user ed. or localizers.',),
('EventMapping','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the Dialog.',),
('EventMapping','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('EventMapping','Event','N',None, None, None, None, 'Identifier',None, 'An identifier that specifies the type of the event that the control subscribes to.',),
('EventMapping','Attribute','N',None, None, None, None, 'Identifier',None, 'The name of the control attribute, that is set when this event is received.',),
('Extension','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Extension','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Extension','Extension','N',None, None, None, None, 'Text',None, 'The extension associated with the table row.',),
('Extension','MIME_','Y',None, None, 'MIME',1,'Text',None, 'Optional Context identifier, typically "type/format" associated with the extension',),
('Extension','ProgId_','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this extension.',),
('MIME','CLSID','Y',None, None, None, None, 'Guid',None, 'Optional associated CLSID.',),
('MIME','ContentType','N',None, None, None, None, 'Text',None, 'Primary key. Context identifier, typically "type/format".',),
('MIME','Extension_','N',None, None, 'Extension',1,'Text',None, 'Optional associated extension (without dot)',),
('FeatureComponents','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('FeatureComponents','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('FileSFPCatalog','File_','N',None, None, 'File',1,'Identifier',None, 'File associated with the catalog',),
('FileSFPCatalog','SFPCatalog_','N',None, None, 'SFPCatalog',1,'Filename',None, 'Catalog associated with the file',),
('SFPCatalog','SFPCatalog','N',None, None, None, None, 'Filename',None, 'File name for the catalog.',),
('SFPCatalog','Catalog','N',None, None, None, None, 'Binary',None, 'SFP Catalog',),
('SFPCatalog','Dependency','Y',None, None, None, None, 'Formatted',None, 'Parent catalog - only used by SFP',),
('Font','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing font file.',),
('Font','FontTitle','Y',None, None, None, None, 'Text',None, 'Font name.',),
('IniFile','Action','N',None, None, None, None, None, '0;1;3','The type of modification to be made, one of iifEnum',),
('IniFile','Value','N',None, None, None, None, 'Formatted',None, 'The value to be written.',),
('IniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
('IniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to write the information',),
('IniFile','IniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('IniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('IniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('IniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('IniLocator','Type','Y',0,2,None, None, None, None, 'An integer value that determines if the .INI value read is a filename, a directory location, or to be used as-is without interpretation.',),
('IniLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('IniLocator','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name.',),
('IniLocator','Key','N',None, None, None, None, 'Text',None, 'Key value (followed by an equals sign in INI file).',),
('IniLocator','Section','N',None, None, None, None, 'Text',None, 'Section name within the .INI file (within square brackets in the INI file).',),
('IniLocator','Field','Y',0,32767,None, None, None, None, 'The field in the .INI line. If Field is null or 0 the entire line is read.',),
('InstallExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('InstallUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('IsolatedComponent','Component_Application','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item for application',),
('IsolatedComponent','Component_Shared','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item to be isolated',),
('LaunchCondition','Description','N',None, None, None, None, 'Formatted',None, 'Localizable text to display when condition fails and install must abort.',),
('LaunchCondition','Condition','N',None, None, None, None, 'Condition',None, 'Expression which must evaluate to TRUE in order for install to commence.',),
('ListBox','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
('ListBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
('ListView','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListView','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
('ListView','Value','N',None, None, None, None, 'Identifier',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListView','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
('ListView','Binary_','Y',None, None, 'Binary',1,'Identifier',None, 'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
('LockPermissions','Table','N',None, None, None, None, 'Identifier','Directory;File;Registry','Reference to another table name',),
('LockPermissions','Domain','Y',None, None, None, None, 'Formatted',None, 'Domain name for user whose permissions are being set. (usually a property)',),
('LockPermissions','LockObject','N',None, None, None, None, 'Identifier',None, 'Foreign key into Registry or File table',),
('LockPermissions','Permission','Y',-2147483647,2147483647,None, None, None, None, 'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
('LockPermissions','User','N',None, None, None, None, 'Formatted',None, 'User for permissions to be set. (usually a property)',),
('Media','Source','Y',None, None, None, None, 'Property',None, 'The property defining the location of the cabinet file.',),
('Media','Cabinet','Y',None, None, None, None, 'Cabinet',None, 'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
('Media','DiskId','N',1,32767,None, None, None, None, 'Primary key, integer to determine sort order for table.',),
('Media','DiskPrompt','Y',None, None, None, None, 'Text',None, 'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
('Media','LastSequence','N',0,32767,None, None, None, None, 'File sequence number for the last file for this media.',),
('Media','VolumeLabel','Y',None, None, None, None, 'Text',None, 'The label attributed to the volume.',),
('ModuleComponents','Component','N',None, None, 'Component',1,'Identifier',None, 'Component contained in the module.',),
('ModuleComponents','Language','N',None, None, 'ModuleSignature',2,None, None, 'Default language ID for module (may be changed by transform).',),
('ModuleComponents','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module containing the component.',),
('ModuleSignature','Language','N',None, None, None, None, None, None, 'Default decimal language of module.',),
('ModuleSignature','Version','N',None, None, None, None, 'Version',None, 'Version of the module.',),
('ModuleSignature','ModuleID','N',None, None, None, None, 'Identifier',None, 'Module identifier (String.GUID).',),
('ModuleDependency','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module requiring the dependency.',),
('ModuleDependency','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'Language of module requiring the dependency.',),
('ModuleDependency','RequiredID','N',None, None, None, None, None, None, 'String.GUID of required module.',),
('ModuleDependency','RequiredLanguage','N',None, None, None, None, None, None, 'LanguageID of the required module.',),
('ModuleDependency','RequiredVersion','Y',None, None, None, None, 'Version',None, 'Version of the required module.',),
('ModuleExclusion','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'String.GUID of module with exclusion requirement.',),
('ModuleExclusion','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'LanguageID of module with exclusion requirement.',),
('ModuleExclusion','ExcludedID','N',None, None, None, None, None, None, 'String.GUID of excluded module.',),
('ModuleExclusion','ExcludedLanguage','N',None, None, None, None, None, None, 'Language of excluded module.',),
('ModuleExclusion','ExcludedMaxVersion','Y',None, None, None, None, 'Version',None, 'Maximum version of excluded module.',),
('ModuleExclusion','ExcludedMinVersion','Y',None, None, None, None, 'Version',None, 'Minimum version of excluded module.',),
('MoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
('MoveFile','DestFolder','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('MoveFile','DestName','Y',None, None, None, None, 'Filename',None, 'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
('MoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular MoveFile record',),
('MoveFile','Options','N',0,1,None, None, None, None, 'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
('MoveFile','SourceFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the source directory',),
('MoveFile','SourceName','Y',None, None, None, None, 'Text',None, "Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
('MsiAssembly','Attributes','Y',None, None, None, None, None, None, 'Assembly attributes',),
('MsiAssembly','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('MsiAssembly','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiAssembly','File_Application','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
('MsiAssembly','File_Manifest','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the manifest file for the assembly.',),
('MsiAssemblyName','Name','N',None, None, None, None, 'Text',None, 'The name part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Value','N',None, None, None, None, 'Text',None, 'The value part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiDigitalCertificate','CertData','N',None, None, None, None, 'Binary',None, 'A certificate context blob for a signer certificate',),
('MsiDigitalCertificate','DigitalCertificate','N',None, None, None, None, 'Identifier',None, 'A unique identifier for the row',),
('MsiDigitalSignature','Table','N',None, None, None, None, None, 'Media','Reference to another table name (only Media table is supported)',),
('MsiDigitalSignature','DigitalCertificate_','N',None, None, 'MsiDigitalCertificate',1,'Identifier',None, 'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
('MsiDigitalSignature','Hash','Y',None, None, None, None, 'Binary',None, 'The encoded hash blob from the digital signature',),
('MsiDigitalSignature','SignObject','N',None, None, None, None, 'Text',None, 'Foreign key to Media table',),
('MsiFileHash','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing file with this hash',),
('MsiFileHash','Options','N',0,32767,None, None, None, None, 'Various options and attributes for this hash.',),
('MsiFileHash','HashPart1','N',None, None, None, None, None, None, 'First 32 bits of the 128-bit file hash.',),
('MsiFileHash','HashPart2','N',None, None, None, None, None, None, 'Second 32 bits of the 128-bit file hash.',),
('MsiFileHash','HashPart3','N',None, None, None, None, None, None, 'Third 32 bits of the 128-bit file hash.',),
('MsiFileHash','HashPart4','N',None, None, None, None, None, None, 'Fourth 32 bits of the 128-bit file hash.',),
('MsiPatchHeaders','StreamRef','N',None, None, None, None, 'Identifier',None, 'Primary key. A unique identifier for the row.',),
('MsiPatchHeaders','Header','N',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('ODBCAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC driver attribute',),
('ODBCAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC driver attribute',),
('ODBCAttribute','Driver_','N',None, None, 'ODBCDriver',1,'Identifier',None, 'Reference to ODBC driver in ODBCDriver table',),
('ODBCDriver','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for driver, non-localized',),
('ODBCDriver','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key driver file',),
('ODBCDriver','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDriver','Driver','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized internal token for driver',),
('ODBCDriver','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key driver setup DLL',),
('ODBCDataSource','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for data source',),
('ODBCDataSource','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDataSource','DataSource','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized internal token for data source',),
('ODBCDataSource','DriverDescription','N',None, None, None, None, 'Text',None, 'Reference to driver description, may be existing driver',),
('ODBCDataSource','Registration','N',0,1,None, None, None, None, 'Registration option: 0=machine, 1=user, others t.b.d.',),
('ODBCSourceAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC data source attribute',),
('ODBCSourceAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC data source attribute',),
('ODBCSourceAttribute','DataSource_','N',None, None, 'ODBCDataSource',1,'Identifier',None, 'Reference to ODBC data source in ODBCDataSource table',),
('ODBCTranslator','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for translator',),
('ODBCTranslator','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key translator file',),
('ODBCTranslator','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCTranslator','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key translator setup DLL',),
('ODBCTranslator','Translator','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized. Internal token for translator',),
('Patch','Sequence','N',0,32767,None, None, None, None, 'Primary key, sequence with respect to the media images; order must track cabinet order.',),
('Patch','Attributes','N',0,32767,None, None, None, None, 'Integer containing bit flags representing patch attributes',),
('Patch','File_','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
('Patch','Header','Y',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('Patch','PatchSize','N',0,2147483647,None, None, None, None, 'Size of patch in bytes (integer).',),
('Patch','StreamRef_','Y',None, None, None, None, 'Identifier',None, 'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
('PatchPackage','Media_','N',0,32767,None, None, None, None, 'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
('PatchPackage','PatchId','N',None, None, None, None, 'Guid',None, 'A unique string GUID representing this patch.',),
('PublishComponent','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into the Feature table.',),
('PublishComponent','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('PublishComponent','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID that represents the component id that will be requested by the alien product.',),
('PublishComponent','AppData','Y',None, None, None, None, 'Text',None, 'This is localisable Application specific data that can be associated with a Qualified Component.',),
('PublishComponent','Qualifier','N',None, None, None, None, 'Text',None, 'This is defined only when the ComponentId column is a Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
('RadioButton','Y','N',0,32767,None, None, None, None, 'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Text','Y',None, None, None, None, 'Text',None, 'The visible title to be assigned to the radio button.',),
('RadioButton','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
('RadioButton','Height','N',0,32767,None, None, None, None, 'The height of the button.',),
('RadioButton','Width','N',0,32767,None, None, None, None, 'The width of the button.',),
('RadioButton','X','N',0,32767,None, None, None, None, 'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this button. Selecting the button will set the associated property to this value.',),
('RadioButton','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
('RadioButton','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional.',),
('Registry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('Registry','Value','Y',None, None, None, None, 'Formatted',None, 'The registry value.',),
('Registry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
('Registry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('Registry','Registry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Registry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RegLocator','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RegLocator','Type','Y',0,18,None, None, None, None, 'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
('RegLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry value refers to a directory, and Signature_ is not a foreign key.',),
('RegLocator','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RegLocator','Root','N',0,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RemoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file to be removed.',),
('RemoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('RemoveFile','FileName','Y',None, None, None, None, 'WildCardFilename',None, 'Name of the file to be removed.',),
('RemoveFile','DirProperty','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
('RemoveFile','InstallMode','N',None, None, None, None, None, '1;2;3','Installation option, one of iimEnum.',),
('RemoveIniFile','Action','N',None, None, None, None, None, '2;4','The type of modification to be made, one of iifEnum.',),
('RemoveIniFile','Value','Y',None, None, None, None, 'Formatted',None, 'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
('RemoveIniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
('RemoveIniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to delete the information',),
('RemoveIniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('RemoveIniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('RemoveIniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('RemoveIniFile','RemoveIniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('RemoveRegistry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RemoveRegistry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
('RemoveRegistry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RemoveRegistry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum',),
('RemoveRegistry','RemoveRegistry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ReserveCost','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reserve a specified amount of space if this component is to be installed.',),
('ReserveCost','ReserveFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('ReserveCost','ReserveKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular ReserveCost record',),
('ReserveCost','ReserveLocal','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed locally.',),
('ReserveCost','ReserveSource','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed to run from the source location.',),
('SelfReg','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the module that needs to be registered.',),
('SelfReg','Cost','Y',0,32767,None, None, None, None, 'The cost of registering the module.',),
('ServiceControl','Name','N',None, None, None, None, 'Formatted',None, 'Name of a service. /, \\, comma and space are invalid',),
('ServiceControl','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceControl','Event','N',0,187,None, None, None, None, 'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
('ServiceControl','ServiceControl','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceControl','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments for the service. Separate by [~].',),
('ServiceControl','Wait','Y',0,1,None, None, None, None, 'Boolean for whether to wait for the service to fully start',),
('ServiceInstall','Name','N',None, None, None, None, 'Formatted',None, 'Internal Name of the Service',),
('ServiceInstall','Description','Y',None, None, None, None, 'Text',None, 'Description of service.',),
('ServiceInstall','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceInstall','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments to include in every start of the service, passed to WinMain',),
('ServiceInstall','ServiceInstall','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceInstall','Dependencies','Y',None, None, None, None, 'Formatted',None, 'Other services this depends on to start. Separate by [~], and end with [~][~]',),
('ServiceInstall','DisplayName','Y',None, None, None, None, 'Formatted',None, 'External Name of the Service',),
('ServiceInstall','ErrorControl','N',-2147483647,2147483647,None, None, None, None, 'Severity of error if service fails to start',),
('ServiceInstall','LoadOrderGroup','Y',None, None, None, None, 'Formatted',None, 'LoadOrderGroup',),
('ServiceInstall','Password','Y',None, None, None, None, 'Formatted',None, 'password to run service with. (with StartName)',),
('ServiceInstall','ServiceType','N',-2147483647,2147483647,None, None, None, None, 'Type of the service',),
('ServiceInstall','StartName','Y',None, None, None, None, 'Formatted',None, 'User or object name to run service as',),
('ServiceInstall','StartType','N',0,4,None, None, None, None, 'Type of the service',),
('Shortcut','Name','N',None, None, None, None, 'Filename',None, 'The name of the shortcut to be created.',),
('Shortcut','Description','Y',None, None, None, None, 'Text',None, 'The description for the shortcut.',),
('Shortcut','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.',),
('Shortcut','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Foreign key into the File table denoting the external icon file for the shortcut.',),
('Shortcut','IconIndex','Y',-32767,32767,None, None, None, None, 'The icon index for the shortcut.',),
('Shortcut','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
('Shortcut','Target','N',None, None, None, None, 'Shortcut',None, 'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
('Shortcut','Arguments','Y',None, None, None, None, 'Formatted',None, 'The command-line arguments for the shortcut.',),
('Shortcut','Shortcut','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Shortcut','Hotkey','Y',0,32767,None, None, None, None, 'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
('Shortcut','ShowCmd','Y',None, None, None, None, None, '1;3;7','The show command for the application window. The following values may be used.',),
('Shortcut','WkDir','Y',None, None, None, None, 'Identifier',None, 'Name of property defining location of working directory.',),
('Signature','FileName','N',None, None, None, None, 'Filename',None, 'The name of the file. This may contain a "short name|long name" pair.',),
('Signature','Signature','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature represents a unique file signature.',),
('Signature','Languages','Y',None, None, None, None, 'Language',None, 'The languages supported by the file.',),
('Signature','MaxDate','Y',0,2147483647,None, None, None, None, 'The maximum creation date of the file.',),
('Signature','MaxSize','Y',0,2147483647,None, None, None, None, 'The maximum size of the file. ',),
('Signature','MaxVersion','Y',None, None, None, None, 'Text',None, 'The maximum version of the file.',),
('Signature','MinDate','Y',0,2147483647,None, None, None, None, 'The minimum creation date of the file.',),
('Signature','MinSize','Y',0,2147483647,None, None, None, None, 'The minimum size of the file.',),
('Signature','MinVersion','Y',None, None, None, None, 'Text',None, 'The minimum version of the file.',),
('TextStyle','TextStyle','N',None, None, None, None, 'Identifier',None, 'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
('TextStyle','Color','Y',0,16777215,None, None, None, None, 'An integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
('TextStyle','FaceName','N',None, None, None, None, 'Text',None, 'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
('TextStyle','Size','N',0,32767,None, None, None, None, 'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
('TextStyle','StyleBits','Y',0,15,None, None, None, None, 'A combination of style bits.',),
('TypeLib','Description','Y',None, None, None, None, 'Text',None, None, ),
('TypeLib','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
('TypeLib','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('TypeLib','Directory_','Y',None, None, 'Directory',1,'Identifier',None, 'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
('TypeLib','Language','N',0,32767,None, None, None, None, 'The language of the library.',),
('TypeLib','Version','Y',0,16777215,None, None, None, None, 'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
('TypeLib','Cost','Y',0,2147483647,None, None, None, None, 'The cost associated with the registration of the typelib. This column is currently optional.',),
('TypeLib','LibID','N',None, None, None, None, 'Guid',None, 'The GUID that represents the library.',),
('UIText','Text','Y',None, None, None, None, 'Text',None, 'The localized version of the string.',),
('UIText','Key','N',None, None, None, None, 'Identifier',None, 'A unique key that identifies the particular string.',),
('Upgrade','Attributes','N',0,2147483647,None, None, None, None, 'The attributes of this product set.',),
('Upgrade','Language','Y',None, None, None, None, 'Language',None, 'A comma-separated list of languages for either products in this set or products not in this set.',),
('Upgrade','ActionProperty','N',None, None, None, None, 'UpperCase',None, 'The property to set when a product in this set is found.',),
('Upgrade','Remove','Y',None, None, None, None, 'Formatted',None, 'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
('Upgrade','UpgradeCode','N',None, None, None, None, 'Guid',None, 'The UpgradeCode GUID belonging to the products in this set.',),
('Upgrade','VersionMax','Y',None, None, None, None, 'Text',None, 'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Upgrade','VersionMin','Y',None, None, None, None, 'Text',None, 'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Verb','Sequence','Y',0,32767,None, None, None, None, 'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
('Verb','Argument','Y',None, None, None, None, 'Formatted',None, 'Optional value for the command arguments.',),
('Verb','Extension_','N',None, None, 'Extension',1,'Text',None, 'The extension associated with the table row.',),
('Verb','Verb','N',None, None, None, None, 'Text',None, 'The verb for the command.',),
('Verb','Command','Y',None, None, None, None, 'Formatted',None, 'The command text.',),
]
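# Illustrative helper (hypothetical; not part of the generated schema data).
# Assuming the list above is bound to a name such as _Validation_records,
# a lookup for one (table, column) pair could look like this:
#
# def find_validation(records, table, column):
#     """Return the first validation record for (table, column), else None."""
#     for rec in records:
#         if rec[0] == table and rec[1] == column:
#             return rec
#     return None
#
# find_validation(_Validation_records, 'Shortcut', 'Target')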
|
gpl-3.0
|
Xarenn/Clint
|
check_exercise.py
|
1
|
1492
|
from sys import platform
import os  # needed for os.devnull below
import subprocess
import psutil
from web_utils import get_text
application_code = get_text()
ClintC = ["system", "cmd", "fstream"]
ClintJava = ["cmd", "Runtime", "Process", "InputStream"]
ClintPython = ["popen", "getopt", "sys", "cmd"]
def kill(pid):
"""Kill a process and all of its children."""
process = psutil.Process(pid)
for proc in process.children(recursive=True):
proc.kill()
process.kill()
def search_for_forbidden_text(app_code, string_list):
for string in sorted(set(string_list)):
if app_code.find(string) != -1:
return "The code contains forbidden strings."
return app_code
def execute_cmd_on_os(app_code, cmd):
if platform == "linux" or platform == "linux2":
check_exercises_on_linux(app_code, cmd)
elif platform == "win32":
check_exercises_on_windows(app_code, cmd)
def check_exercises_on_linux(app_code, cmd):
output = open("NUL", "w")
sp = subprocess.Popen(["/bin/bash", "-c", cmd], stdout=output, stderr=output)
sp.communicate(timeout=5)
output.close()
if sp.returncode != 0:
kill(sp.pid)
raise Exception("Command returned with non zero value!")
def check_exercises_on_windows(app_code, cmd):
output = open("NUL", "w")
sp = subprocess.Popen(["powershell", cmd], stdout=output, stderr=output)
sp.communicate(timeout=5)
output.close()
if sp.returncode != 0:
kill(sp.pid)
raise Exception("Command returned with non zero value!")
|
gpl-3.0
|
riteshshrv/django
|
tests/i18n/sampleproject/update_catalogs.py
|
344
|
1780
|
#!/usr/bin/env python
"""
Helper script to update sampleproject's translation catalogs.
When a bug has been identified related to i18n, this helps capture the issue
by using catalogs created from management commands.
Example:
The string "Two %% Three %%%" renders differently using trans and blocktrans.
This issue is difficult to debug, it could be a problem with extraction,
interpolation, or both.
How this script helps:
* Add {% trans "Two %% Three %%%" %} and blocktrans equivalent to templates.
* Run this script.
* Test extraction - verify the new msgid in sampleproject's django.po.
* Add a translation to sampleproject's django.po.
* Run this script.
* Test interpolation - verify templatetag rendering, test each in a template
that is rendered using an activated language from sampleproject's locale.
* Tests should fail, issue captured.
* Fix issue.
* Run this script.
* Tests all pass.
"""
import os
import re
import sys
proj_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(os.path.join(proj_dir, '..', '..', '..')))
def update_translation_catalogs():
"""Run makemessages and compilemessages in sampleproject."""
from django.core.management import call_command
prev_cwd = os.getcwd()
os.chdir(proj_dir)
call_command('makemessages')
call_command('compilemessages')
# keep the diff friendly - remove 'POT-Creation-Date'
pofile = os.path.join(proj_dir, 'locale', 'fr', 'LC_MESSAGES', 'django.po')
with open(pofile) as f:
content = f.read()
content = re.sub(r'^"POT-Creation-Date.+$\s', '', content, flags=re.MULTILINE)
with open(pofile, 'w') as f:
f.write(content)
os.chdir(prev_cwd)
if __name__ == "__main__":
update_translation_catalogs()
|
bsd-3-clause
|
Universal-Model-Converter/UMC3.0a
|
data/Python/x86/Lib/site-packages/OpenGL/raw/GL/SGIX/polynomial_ffd.py
|
3
|
2987
|
'''OpenGL extension SGIX.polynomial_ffd
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_SGIX_polynomial_ffd'
_DEPRECATED = False
GL_GEOMETRY_DEFORMATION_SGIX = constant.Constant( 'GL_GEOMETRY_DEFORMATION_SGIX', 0x8194 )
GL_TEXTURE_DEFORMATION_SGIX = constant.Constant( 'GL_TEXTURE_DEFORMATION_SGIX', 0x8195 )
GL_DEFORMATIONS_MASK_SGIX = constant.Constant( 'GL_DEFORMATIONS_MASK_SGIX', 0x8196 )
GL_MAX_DEFORMATION_ORDER_SGIX = constant.Constant( 'GL_MAX_DEFORMATION_ORDER_SGIX', 0x8197 )
glDeformationMap3dSGIX = platform.createExtensionFunction(
'glDeformationMap3dSGIX',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLdouble,constants.GLdouble,constants.GLint,constants.GLint,constants.GLdouble,constants.GLdouble,constants.GLint,constants.GLint,constants.GLdouble,constants.GLdouble,constants.GLint,constants.GLint,arrays.GLdoubleArray,),
doc='glDeformationMap3dSGIX(GLenum(target), GLdouble(u1), GLdouble(u2), GLint(ustride), GLint(uorder), GLdouble(v1), GLdouble(v2), GLint(vstride), GLint(vorder), GLdouble(w1), GLdouble(w2), GLint(wstride), GLint(worder), GLdoubleArray(points)) -> None',
argNames=('target','u1','u2','ustride','uorder','v1','v2','vstride','vorder','w1','w2','wstride','worder','points',),
deprecated=_DEPRECATED,
)
glDeformationMap3fSGIX = platform.createExtensionFunction(
'glDeformationMap3fSGIX',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLfloat,constants.GLfloat,constants.GLint,constants.GLint,constants.GLfloat,constants.GLfloat,constants.GLint,constants.GLint,constants.GLfloat,constants.GLfloat,constants.GLint,constants.GLint,arrays.GLfloatArray,),
doc='glDeformationMap3fSGIX(GLenum(target), GLfloat(u1), GLfloat(u2), GLint(ustride), GLint(uorder), GLfloat(v1), GLfloat(v2), GLint(vstride), GLint(vorder), GLfloat(w1), GLfloat(w2), GLint(wstride), GLint(worder), GLfloatArray(points)) -> None',
argNames=('target','u1','u2','ustride','uorder','v1','v2','vstride','vorder','w1','w2','wstride','worder','points',),
deprecated=_DEPRECATED,
)
glDeformSGIX = platform.createExtensionFunction(
'glDeformSGIX',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLbitfield,),
doc='glDeformSGIX(GLbitfield(mask)) -> None',
argNames=('mask',),
deprecated=_DEPRECATED,
)
glLoadIdentityDeformationMapSGIX = platform.createExtensionFunction(
'glLoadIdentityDeformationMapSGIX',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLbitfield,),
doc='glLoadIdentityDeformationMapSGIX(GLbitfield(mask)) -> None',
argNames=('mask',),
deprecated=_DEPRECATED,
)
def glInitPolynomialFfdSGIX():
'''Return boolean indicating whether this extension is available'''
return extensions.hasGLExtension( EXTENSION_NAME )
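# Minimal availability sketch (not part of the generated module): guard use of
# the deformation entry points behind the extension query. The mask argument is
# a bitfield per the SGIX spec; the exact bit values are not defined here.
#
# if glInitPolynomialFfdSGIX():
#     glDeformSGIX(deformation_mask)  # bitfield of deformation bits (see spec)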
|
mit
|
xiaohan2012/twitter-sent-dnn
|
test_rntn_layer.py
|
4
|
1452
|
import numpy as np
from recnn_train import RNTNLayer as TheanoRNTNLayer
import theano
from test_util import assert_matrix_eq
V_val = np.asarray((np.arange(3*6*6) / 100).reshape((3,6,6)), dtype=theano.config.floatX)
W_val = np.asarray((np.arange(3*6) / 100).reshape((3,6)), dtype=theano.config.floatX)
theano_l = TheanoRNTNLayer(np.random.RandomState(1234), 3,
V = theano.shared(value = V_val,
name = "V",
borrow = True),
W = theano.shared(value = W_val,
name = "W",
borrow = True)
)
left_input = np.asarray([[0,0,1]], dtype=theano.config.floatX)
right_input = np.asarray([[0,1,0]], dtype=theano.config.floatX)
################
# NUMPY IMPL ###
################
from recnn import RNTNLayer as NumpyRNTNLayer
numpy_l = NumpyRNTNLayer(theano_l.V.get_value(), theano_l.W.get_value())
actual = numpy_l.output(left_input, right_input)
actual1 = numpy_l.output(np.squeeze(left_input), np.squeeze(right_input)) #passing 1d array
################
# THEANO PART #
################
left = theano.tensor.drow("left")
right = theano.tensor.drow("right")
f = theano.function(
inputs = [left, right],
outputs = theano_l.output(left, right)
)
expected = f(left_input, right_input)
assert_matrix_eq(actual, expected, "output")
assert_matrix_eq(actual1, expected, "output(1d passed in)")
|
mit
|
akiss77/servo
|
tests/wpt/css-tests/tools/html5lib/html5lib/tests/tokenizertotree.py
|
483
|
1965
|
from __future__ import absolute_import, division, unicode_literals
import sys
import os
import json
import re
import html5lib
from . import support
from . import test_tokenizer
p = html5lib.HTMLParser()
unnamespaceExpected = re.compile(r"^(\|\s*)<html ([^>]+)>", re.M).sub
def main(out_path):
if not os.path.exists(out_path):
sys.stderr.write("Path %s does not exist" % out_path)
sys.exit(1)
for filename in support.get_data_files('tokenizer', '*.test'):
run_file(filename, out_path)
def run_file(filename, out_path):
try:
tests_data = json.load(open(filename, "r"))
except ValueError:
sys.stderr.write("Failed to load %s\n" % filename)
return
name = os.path.splitext(os.path.split(filename)[1])[0]
output_file = open(os.path.join(out_path, "tokenizer_%s.dat" % name), "w")
if 'tests' in tests_data:
for test_data in tests_data['tests']:
if 'initialStates' not in test_data:
test_data["initialStates"] = ["Data state"]
for initial_state in test_data["initialStates"]:
if initial_state != "Data state":
# don't support this yet
continue
test = make_test(test_data)
output_file.write(test)
output_file.close()
def make_test(test_data):
if 'doubleEscaped' in test_data:
test_data = test_tokenizer.unescape_test(test_data)
rv = []
rv.append("#data")
rv.append(test_data["input"].encode("utf8"))
rv.append("#errors")
tree = p.parse(test_data["input"])
output = p.tree.testSerializer(tree)
output = "\n".join(("| " + line[3:]) if line.startswith("| ") else line
for line in output.split("\n"))
output = unnamespaceExpected(r"\1<\2>", output)
rv.append(output.encode("utf8"))
rv.append("")
return "\n".join(rv)
if __name__ == "__main__":
main(sys.argv[1])
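# Illustrative invocation (hypothetical output directory): the script takes a
# single argument, the directory that receives the tokenizer_<name>.dat files.
#
# python tokenizertotree.py /tmp/tree-construction-tests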
|
mpl-2.0
|
vks/servo
|
tests/wpt/update/updatecommandline.py
|
210
|
1736
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def create_parser():
from wptrunner import wptcommandline
parser = wptcommandline.create_parser_update()
parser.add_argument("--upstream", dest="upstream", action="store_true", default=None,
help="Push local changes to upstream repository even when not syncing")
parser.add_argument("--no-upstream", dest="upstream", action="store_false", default=None,
help="Dont't push local changes to upstream repository when syncing")
parser.add_argument("--token-file", action="store", type=wptcommandline.abs_path,
help="Path to file containing github token")
parser.add_argument("--token", action="store", help="GitHub token to use")
return parser
def check_args(kwargs):
from wptrunner import wptcommandline
wptcommandline.set_from_config(kwargs)
kwargs["upstream"] = kwargs["upstream"] if kwargs["upstream"] is not None else kwargs["sync"]
if kwargs["upstream"]:
if kwargs["rev"]:
raise ValueError("Setting --rev with --upstream isn't supported")
if kwargs["token"] is None:
if kwargs["token_file"] is None:
raise ValueError("Must supply either a token file or a token")
with open(kwargs["token_file"]) as f:
token = f.read().strip()
kwargs["token"] = token
del kwargs["token_file"]
return kwargs
def parse_args():
parser = create_parser()
kwargs = vars(parser.parse_args())
return check_args(kwargs)
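# Illustrative flow (hypothetical command line; not part of this module):
#
# kwargs = parse_args()  # e.g. invoked as: update --sync --token-file ~/.wpt-token
# assert "token_file" not in kwargs  # check_args() folds the file into kwargs["token"]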
|
mpl-2.0
|
upconsulting/IsisCB
|
isiscb/isisdata/migrations/0024_auto_20160623_2302.py
|
1
|
2305
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-23 23:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('isisdata', '0023_auto_20160621_1726'),
]
operations = [
migrations.CreateModel(
name='ISODateValue',
fields=[
('value_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='isisdata.Value')),
('year', models.IntegerField(default=0)),
('month', models.IntegerField(default=0)),
('day', models.IntegerField(default=0)),
],
options={
'verbose_name': 'isodate',
},
bases=('isisdata.value',),
),
migrations.AlterField(
model_name='acrelation',
name='type_controlled',
field=models.CharField(blank=True, choices=[(b'AU', b'Author'), (b'ED', b'Editor'), (b'AD', b'Advisor'), (b'CO', b'Contributor'), (b'TR', b'Translator'), (b'SU', b'Subject'), (b'CA', b'Category'), (b'PU', b'Publisher'), (b'SC', b'School'), (b'IN', b'Institution'), (b'ME', b'Meeting'), (b'PE', b'Periodical'), (b'BS', b'Book Series'), (b'CM', b'Committee Member')], help_text=b'Used to specify the nature of the relationship between authority (as the subject) and the citation (as the object).', max_length=2, null=True, verbose_name=b'relationship type'),
),
migrations.AlterField(
model_name='historicalacrelation',
name='type_controlled',
field=models.CharField(blank=True, choices=[(b'AU', b'Author'), (b'ED', b'Editor'), (b'AD', b'Advisor'), (b'CO', b'Contributor'), (b'TR', b'Translator'), (b'SU', b'Subject'), (b'CA', b'Category'), (b'PU', b'Publisher'), (b'SC', b'School'), (b'IN', b'Institution'), (b'ME', b'Meeting'), (b'PE', b'Periodical'), (b'BS', b'Book Series'), (b'CM', b'Committee Member')], help_text=b'Used to specify the nature of the relationship between authority (as the subject) and the citation (as the object).', max_length=2, null=True, verbose_name=b'relationship type'),
),
]
|
mit
|
UniMOOC/gcb-new-module
|
modules/dashboard/peer_review.py
|
8
|
12409
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for management of individual students' peer review assignments."""
__author__ = 'Sean Lip ([email protected])'
import os
import urllib
import messages
from controllers.lessons import create_readonly_assessment_params
from controllers.utils import ApplicationHandler
from models import courses
from models import models
from models import review
from models import roles
from models import student_work
from models import transforms
from modules.review import domain
class AssignmentsRights(object):
"""Manages view/edit rights for assignments and reviews."""
@classmethod
def can_view(cls, handler):
return roles.Roles.is_course_admin(handler.app_context)
@classmethod
def can_edit(cls, handler):
return roles.Roles.is_course_admin(handler.app_context)
@classmethod
def can_delete(cls, handler):
return cls.can_edit(handler)
@classmethod
def can_add(cls, handler):
return cls.can_edit(handler)
class AssignmentManager(ApplicationHandler):
"""A view for managing human-reviewed assignments."""
def get_assignment_html(
self, peer_reviewed_units, unit_id=None, reviewee_id=None,
error_msg=None, readonly_assessment=None, review_steps=None,
reviewers=None, reviews_params=None, model_version=None):
"""Renders a template allowing an admin to select an assignment."""
edit_url = self.canonicalize_url('/dashboard')
return self.render_template_to_html({
'REVIEW_STATE_COMPLETED': domain.REVIEW_STATE_COMPLETED,
'add_reviewer_action': self.get_action_url('add_reviewer'),
'add_reviewer_xsrf_token': self.create_xsrf_token('add_reviewer'),
'delete_reviewer_action': self.get_action_url('delete_reviewer'),
'delete_reviewer_xsrf_token': self.create_xsrf_token(
'delete_reviewer'),
'edit_assignment_action': 'edit_assignment',
'edit_url': edit_url,
'error_msg': error_msg,
'peer_reviewed_units': peer_reviewed_units,
'readonly_student_assessment': readonly_assessment,
'reviewee_id': reviewee_id or '',
'reviewers': reviewers,
'reviews_params': reviews_params,
'review_steps': review_steps,
'unit_id': unit_id,
'model_version': model_version
}, 'assignments_menu.html', [os.path.dirname(__file__)])
def parse_request(self, course, unit_id, reviewee_id, reviewer_id=None):
"""Parses request parameters in a GET or POST request.
Args:
course: Course. A course object.
unit_id: str. The id of the unit.
reviewee_id: str. The email address of the reviewee.
reviewer_id: str. The email address of the reviewer.
Returns:
- a dict containing some subset of the following keys: unit,
reviewee, reviewer.
- if necessary, an error message to be passed to the frontend.
"""
request_params = {}
# Check unit validity.
if not unit_id:
return request_params, ''
unit = course.find_unit_by_id(unit_id)
if not unit:
return request_params, '404: Unit not found.'
if (unit.workflow.get_grader() != courses.HUMAN_GRADER or
unit.workflow.get_matcher() != review.PEER_MATCHER):
return request_params, '412: This unit is not peer-graded.'
request_params['unit'] = unit
# Check reviewee validity.
if not reviewee_id:
return request_params, '412: No student email supplied.'
reviewee = models.Student.get_enrolled_student_by_email(reviewee_id)
if not reviewee:
return (request_params,
'412: No student with this email address exists.')
request_params['reviewee'] = reviewee
# Check reviewer validity, if applicable.
if reviewer_id is not None:
if not reviewer_id:
return request_params, '412: No reviewer email supplied.'
reviewer = models.Student.get_enrolled_student_by_email(reviewer_id)
if not reviewer:
return (request_params,
'412: No reviewer with this email address exists.')
request_params['reviewer'] = reviewer
return request_params, ''
def get_edit_assignment(self):
"""Shows interface for selecting and viewing a student assignment."""
if not AssignmentsRights.can_view(self):
self.error(401)
return
course = courses.Course(self)
peer_reviewed_units = course.get_peer_reviewed_units()
page_title = 'Peer Review'
template_values = {}
template_values['page_title'] = self.format_title(page_title)
template_values['page_description'] = (
messages.ASSIGNMENTS_MENU_DESCRIPTION)
unit_id = self.request.get('unit_id')
if not unit_id:
# No unit has been set yet, so display an empty form.
template_values['main_content'] = self.get_assignment_html(
peer_reviewed_units)
self.render_page(template_values)
return
reviewee_id = self.request.get('reviewee_id')
# This field may be populated due to a redirect from a POST method.
post_error_msg = self.request.get('post_error_msg')
request_params, error_msg = self.parse_request(
course, unit_id, reviewee_id)
unit = request_params.get('unit')
reviewee = request_params.get('reviewee')
if error_msg:
template_values['main_content'] = self.get_assignment_html(
peer_reviewed_units, unit_id=unit_id, reviewee_id=reviewee_id,
error_msg=error_msg)
self.render_page(template_values)
return
model_version = course.get_assessment_model_version(unit)
assert model_version in courses.SUPPORTED_ASSESSMENT_MODEL_VERSIONS
if model_version == courses.ASSESSMENT_MODEL_VERSION_1_4:
get_readonly_assessment = self.get_readonly_assessment_1_4
get_readonly_review = self.get_readonly_review_1_4
elif model_version == courses.ASSESSMENT_MODEL_VERSION_1_5:
get_readonly_assessment = self.get_readonly_assessment_1_5
get_readonly_review = self.get_readonly_review_1_5
else:
raise ValueError('Bad assessment model version: %s' % model_version)
# Render content.
rp = course.get_reviews_processor()
submission_and_review_steps = rp.get_submission_and_review_steps(
unit.unit_id, reviewee.get_key())
if not submission_and_review_steps:
template_values['main_content'] = self.get_assignment_html(
peer_reviewed_units, unit_id=unit_id, reviewee_id=reviewee_id,
error_msg='412: This student hasn\'t submitted the assignment.'
)
self.render_page(template_values)
return
readonly_assessment = get_readonly_assessment(
unit, submission_and_review_steps[0])
review_steps = submission_and_review_steps[1]
reviews = rp.get_reviews_by_keys(
unit.unit_id,
[review_step.review_key for review_step in review_steps],
handle_empty_keys=True)
reviews_params = []
reviewers = []
for idx, review_step in enumerate(review_steps):
params = get_readonly_review(unit, reviews[idx])
reviews_params.append(params)
reviewer = models.Student.get_student_by_user_id(
review_step.reviewer_key.name()).key().name()
reviewers.append(reviewer)
assert len(reviewers) == len(review_steps)
assert len(reviews_params) == len(review_steps)
template_values['main_content'] = self.get_assignment_html(
peer_reviewed_units, unit_id=unit_id, reviewee_id=reviewee_id,
readonly_assessment=readonly_assessment, review_steps=review_steps,
error_msg=post_error_msg, reviewers=reviewers,
reviews_params=reviews_params,
model_version=model_version)
self.render_page(template_values)
def get_readonly_assessment_1_4(self, unit, submission_content):
return create_readonly_assessment_params(
courses.Course(self).get_assessment_content(unit),
student_work.StudentWorkUtils.get_answer_list(submission_content))
def get_readonly_assessment_1_5(self, unit, submission_content):
return {
'content': unit.html_content,
'saved_answers': transforms.dumps(submission_content)
}
def get_readonly_review_1_4(self, unit, review_content):
return create_readonly_assessment_params(
courses.Course(self).get_review_content(unit),
student_work.StudentWorkUtils.get_answer_list(review_content))
def get_readonly_review_1_5(self, unit, review_content):
return {
'content': unit.html_review_form,
'saved_answers': transforms.dumps(review_content)
}
def post_add_reviewer(self):
"""Adds a new reviewer to a human-reviewed assignment."""
if not AssignmentsRights.can_edit(self):
self.error(401)
return
course = courses.Course(self)
unit_id = self.request.get('unit_id')
reviewee_id = self.request.get('reviewee_id')
reviewer_id = self.request.get('reviewer_id')
request_params, post_error_msg = self.parse_request(
course, unit_id, reviewee_id, reviewer_id=reviewer_id)
redirect_params = {
'action': 'edit_assignment',
'reviewee_id': reviewee_id,
'reviewer_id': reviewer_id,
'unit_id': unit_id,
}
if post_error_msg:
redirect_params['post_error_msg'] = post_error_msg
self.redirect('/dashboard?%s' % urllib.urlencode(redirect_params))
return
unit = request_params.get('unit')
reviewee = request_params.get('reviewee')
reviewer = request_params.get('reviewer')
rp = course.get_reviews_processor()
reviewee_key = reviewee.get_key()
reviewer_key = reviewer.get_key()
try:
rp.add_reviewer(unit.unit_id, reviewee_key, reviewer_key)
except domain.TransitionError:
redirect_params['post_error_msg'] = (
'412: The reviewer is already assigned to this submission.')
self.redirect('/dashboard?%s' % urllib.urlencode(redirect_params))
def post_delete_reviewer(self):
"""Deletes a reviewer from a human-reviewed assignment."""
if not AssignmentsRights.can_edit(self):
self.error(401)
return
course = courses.Course(self)
unit_id = self.request.get('unit_id')
reviewee_id = self.request.get('reviewee_id')
review_step_key = self.request.get('key')
request_params, post_error_msg = self.parse_request(
course, unit_id, reviewee_id)
redirect_params = {
'action': 'edit_assignment',
'reviewee_id': reviewee_id,
'unit_id': unit_id,
}
if post_error_msg:
redirect_params['post_error_msg'] = post_error_msg
self.redirect('/dashboard?%s' % urllib.urlencode(redirect_params))
return
rp = course.get_reviews_processor()
unit = request_params.get('unit')
rp.delete_reviewer(unit.unit_id, review_step_key)
self.redirect('/dashboard?%s' % urllib.urlencode(redirect_params))
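# Illustrative only (hypothetical values): the POST handlers above rebuild the
# dashboard URL from redirect_params with Python 2 urllib, e.g.
#
# import urllib
# urllib.urlencode({'action': 'edit_assignment', 'unit_id': '7'})
# # -> 'action=edit_assignment&unit_id=7' (key order may vary)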
|
apache-2.0
|
tiwillia/openshift-tools
|
openshift/installer/vendored/openshift-ansible-3.4.40/roles/openshift_cli/library/openshift_container_binary_sync.py
|
27
|
4858
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
# pylint: disable=missing-docstring,invalid-name
#
import random
import tempfile
import shutil
import os.path
# pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
from ansible.module_utils.basic import *
DOCUMENTATION = '''
---
module: openshift_container_binary_sync
short_description: Copies OpenShift binaries out of the given image tag to host system.
'''
class BinarySyncError(Exception):
def __init__(self, msg):
super(BinarySyncError, self).__init__(msg)
self.msg = msg
# pylint: disable=too-few-public-methods
class BinarySyncer(object):
"""
Syncs the openshift, oc, oadm, and kubectl binaries/symlinks out of
a container onto the host system.
"""
def __init__(self, module, image, tag):
self.module = module
self.changed = False
self.output = []
self.bin_dir = '/usr/local/bin'
self.image = image
self.tag = tag
self.temp_dir = None # TBD
def sync(self):
container_name = "openshift-cli-%s" % random.randint(1, 100000)
rc, stdout, stderr = self.module.run_command(['docker', 'create', '--name',
container_name, '%s:%s' % (self.image, self.tag)])
if rc:
raise BinarySyncError("Error creating temporary docker container. stdout=%s, stderr=%s" %
(stdout, stderr))
self.output.append(stdout)
try:
self.temp_dir = tempfile.mkdtemp()
self.output.append("Using temp dir: %s" % self.temp_dir)
rc, stdout, stderr = self.module.run_command(['docker', 'cp', "%s:/usr/bin/openshift" % container_name,
self.temp_dir])
if rc:
raise BinarySyncError("Error copying file from docker container: stdout=%s, stderr=%s" %
(stdout, stderr))
rc, stdout, stderr = self.module.run_command(['docker', 'cp', "%s:/usr/bin/oc" % container_name,
self.temp_dir])
if rc:
raise BinarySyncError("Error copying file from docker container: stdout=%s, stderr=%s" %
(stdout, stderr))
self._sync_binary('openshift')
# In older versions, oc was a symlink to openshift:
if os.path.islink(os.path.join(self.temp_dir, 'oc')):
self._sync_symlink('oc', 'openshift')
else:
self._sync_binary('oc')
# Ensure correct symlinks created:
self._sync_symlink('kubectl', 'openshift')
self._sync_symlink('oadm', 'openshift')
finally:
shutil.rmtree(self.temp_dir)
self.module.run_command(['docker', 'rm', container_name])
def _sync_symlink(self, binary_name, link_to):
""" Ensure the given binary name exists and links to the expected binary. """
# The symlink we are creating:
link_path = os.path.join(self.bin_dir, binary_name)
# The expected file we should be linking to:
link_dest = os.path.join(self.bin_dir, link_to)
if not os.path.exists(link_path) or \
not os.path.islink(link_path) or \
os.path.realpath(link_path) != os.path.realpath(link_dest):
if os.path.exists(link_path):
os.remove(link_path)
os.symlink(link_to, os.path.join(self.bin_dir, binary_name))
self.output.append("Symlinked %s to %s." % (link_path, link_dest))
self.changed = True
def _sync_binary(self, binary_name):
src_path = os.path.join(self.temp_dir, binary_name)
dest_path = os.path.join(self.bin_dir, binary_name)
incoming_checksum = self.module.run_command(['sha256sum', src_path])[1]
if not os.path.exists(dest_path) or self.module.run_command(['sha256sum', dest_path])[1] != incoming_checksum:
shutil.move(src_path, dest_path)
self.output.append("Moved %s to %s." % (src_path, dest_path))
self.changed = True
def main():
module = AnsibleModule(
argument_spec=dict(
image=dict(required=True),
tag=dict(required=True),
),
supports_check_mode=True
)
image = module.params['image']
tag = module.params['tag']
binary_syncer = BinarySyncer(module, image, tag)
try:
binary_syncer.sync()
except BinarySyncError as ex:
module.fail_json(msg=ex.msg)
return module.exit_json(changed=binary_syncer.changed,
output=binary_syncer.output)
if __name__ == '__main__':
main()
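# Illustrative playbook usage (hypothetical image/tag values; not part of this
# module):
#
# - name: Sync CLI binaries out of the origin image
#   openshift_container_binary_sync:
#     image: openshift/origin
#     tag: v1.4.0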
|
apache-2.0
|
Sweet-Peas/mbed
|
workspace_tools/test_mysql.py
|
51
|
12049
|
"""
mbed SDK
Copyright (c) 2011-2014 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: Przemyslaw Wirkus <[email protected]>
"""
import re
import MySQLdb as mdb
# Imports from TEST API
from workspace_tools.test_db import BaseDBAccess
class MySQLDBAccess(BaseDBAccess):
""" Wrapper for MySQL DB access for common test suite interface
"""
def __init__(self):
BaseDBAccess.__init__(self)
self.DB_TYPE = 'mysql'
def detect_database(self, verbose=False):
""" detect database and return VERION data structure or string (verbose=True)
"""
query = 'SHOW VARIABLES LIKE "%version%"'
rows = self.select_all(query)
if verbose:
result = []
for row in rows:
result.append("\t%s: %s"% (row['Variable_name'], row['Value']))
result = "\n".join(result)
else:
result = rows
return result
def parse_db_connection_string(self, str):
""" Parsing SQL DB connection string. String should contain:
- DB Name, user name, password, URL (DB host), name
Function should return tuple with parsed (host, user, passwd, db) or None if error
E.g. connection string: 'mysql://username:[email protected]/db_name'
"""
result = BaseDBAccess().parse_db_connection_string(str)
if result is not None:
(db_type, username, password, host, db_name) = result
if db_type != 'mysql':
result = None
return result
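# Illustrative only (hypothetical credentials), matching the tuple unpacked
# above as (db_type, username, password, host, db_name):
#
# parse_db_connection_string('mysql://user:[email protected]/test_db')
# # -> ('mysql', 'user', 'pass', '127.0.0.1', 'test_db')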
def is_connected(self):
""" Returns True if we are connected to database
"""
return self.db_object is not None
def connect(self, host, user, passwd, db):
""" Connects to DB and returns DB object
"""
try:
self.db_object = mdb.connect(host=host, user=user, passwd=passwd, db=db)
# Let's remember connection credentials
self.db_type = self.DB_TYPE
self.host = host
self.user = user
self.passwd = passwd
self.db = db
except mdb.Error, e:
print "Error %d: %s"% (e.args[0], e.args[1])
self.db_object = None
self.db_type = None
self.host = None
self.user = None
self.passwd = None
self.db = None
def connect_url(self, db_url):
""" Connects to database using db_url (database url parsing),
store host, username, password, db_name
"""
result = self.parse_db_connection_string(db_url)
if result is not None:
(db_type, username, password, host, db_name) = result
if db_type == self.DB_TYPE:
self.connect(host, username, password, db_name)
def reconnect(self):
""" Reconnects to DB and returns DB object using stored host name,
database name and credentials (user name and password)
"""
self.connect(self.host, self.user, self.passwd, self.db)
def disconnect(self):
""" Close DB connection
"""
if self.db_object:
self.db_object.close()
self.db_object = None
self.db_type = None
def escape_string(self, str):
""" Escapes string so it can be put in SQL query between quotes
"""
con = self.db_object
result = con.escape_string(str)
return result if result else ''
def select_all(self, query):
""" Execute SELECT query and get all results
"""
con = self.db_object
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def insert(self, query, commit=True):
""" Execute INSERT query, define if you want to commit
"""
con = self.db_object
cur = con.cursor()
cur.execute(query)
if commit:
con.commit()
return cur.lastrowid
def get_next_build_id(self, name, desc='', location='', type=None, status=None):
""" Insert new build_id (DB unique build like ID number to send all test results)
"""
if status is None:
status = self.BUILD_ID_STATUS_STARTED
if type is None:
type = self.BUILD_ID_TYPE_TEST
query = """INSERT INTO `%s` (%s_name, %s_desc, %s_location, %s_type_fk, %s_status_fk)
VALUES ('%s', '%s', '%s', %d, %d)"""% (self.TABLE_BUILD_ID,
self.TABLE_BUILD_ID,
self.TABLE_BUILD_ID,
self.TABLE_BUILD_ID,
self.TABLE_BUILD_ID,
self.TABLE_BUILD_ID,
self.escape_string(name),
self.escape_string(desc),
self.escape_string(location),
type,
status)
index = self.insert(query) # Provide inserted record PK
return index
def get_table_entry_pk(self, table, column, value, update_db=True):
""" Checks for entries in tables with two columns (<TABLE_NAME>_pk, <column>)
If update_db is True updates table entry if value in specified column doesn't exist
"""
# TODO: table buffering
result = None
table_pk = '%s_pk'% table
query = """SELECT `%s`
FROM `%s`
WHERE `%s`='%s'"""% (table_pk,
table,
column,
self.escape_string(value))
rows = self.select_all(query)
if len(rows) == 1:
result = rows[0][table_pk]
elif len(rows) == 0 and update_db:
# Update DB with new value
result = self.update_table_entry(table, column, value)
return result
def update_table_entry(self, table, column, value):
""" Updates table entry if value in specified column doesn't exist
Locks table to perform atomic read + update
"""
result = None
con = self.db_object
cur = con.cursor()
cur.execute("LOCK TABLES `%s` WRITE"% table)
table_pk = '%s_pk'% table
query = """SELECT `%s`
FROM `%s`
WHERE `%s`='%s'"""% (table_pk,
table,
column,
self.escape_string(value))
cur.execute(query)
rows = cur.fetchall()
if len(rows) == 0:
query = """INSERT INTO `%s` (%s)
VALUES ('%s')"""% (table,
column,
self.escape_string(value))
cur.execute(query)
result = cur.lastrowid
con.commit()
cur.execute("UNLOCK TABLES")
return result
def update_build_id_info(self, build_id, **kw):
""" Update additional data inside build_id table
Examples:
db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789):
"""
if len(kw):
con = self.db_object
cur = con.cursor()
# Prepare UPDATE query
# ["`mtest_build_id_pk`=[value-1]", "`mtest_build_id_name`=[value-2]", "`mtest_build_id_desc`=[value-3]"]
set_list = []
for col_sufix in kw:
assign_str = "`%s%s`='%s'"% (self.TABLE_BUILD_ID, col_sufix, self.escape_string(str(kw[col_sufix])))
set_list.append(assign_str)
set_str = ', '.join(set_list)
query = """UPDATE `%s`
SET %s
WHERE `mtest_build_id_pk`=%d"""% (self.TABLE_BUILD_ID,
set_str,
build_id)
cur.execute(query)
con.commit()
def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_output, test_time, test_timeout, test_loop, test_extra=''):
""" Inserts test result entry to database. All checks regarding existing
toolchain names in DB are performed.
If some data is missing DB will be updated
"""
# Get all table FK and if entry is new try to insert new value
target_fk = self.get_table_entry_pk(self.TABLE_TARGET, self.TABLE_TARGET + '_name', target)
toolchain_fk = self.get_table_entry_pk(self.TABLE_TOOLCHAIN, self.TABLE_TOOLCHAIN + '_name', toolchain)
test_type_fk = self.get_table_entry_pk(self.TABLE_TEST_TYPE, self.TABLE_TEST_TYPE + '_name', test_type)
test_id_fk = self.get_table_entry_pk(self.TABLE_TEST_ID, self.TABLE_TEST_ID + '_name', test_id)
test_result_fk = self.get_table_entry_pk(self.TABLE_TEST_RESULT, self.TABLE_TEST_RESULT + '_name', test_result)
con = self.db_object
cur = con.cursor()
query = """ INSERT INTO `%s` (`mtest_build_id_fk`,
`mtest_target_fk`,
`mtest_toolchain_fk`,
`mtest_test_type_fk`,
`mtest_test_id_fk`,
`mtest_test_result_fk`,
`mtest_test_output`,
`mtest_test_time`,
`mtest_test_timeout`,
`mtest_test_loop_no`,
`mtest_test_result_extra`)
VALUES (%d, %d, %d, %d, %d, %d, '%s', %.2f, %.2f, %d, '%s')"""% (self.TABLE_TEST_ENTRY,
build_id,
target_fk,
toolchain_fk,
test_type_fk,
test_id_fk,
test_result_fk,
self.escape_string(test_output),
test_time,
test_timeout,
test_loop,
self.escape_string(test_extra))
cur.execute(query)
con.commit()
|
apache-2.0
|
alexus37/AugmentedRealityChess
|
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GL/APPLE/specular_vector.py
|
9
|
2009
|
'''OpenGL extension APPLE.specular_vector
This module customises the behaviour of the
OpenGL.raw.GL.APPLE.specular_vector to provide a more
Python-friendly API
Overview (from the spec)
An alternative specular lighting model is enabled by passing
the LIGHT_MODEL_SPECULAR_VECTOR token as the <pname> parameter
to LightModel, and TRUE as the <param> parameter. The specular
vector lighting model calculates the specular intensity as the
dot product of the true reflection vector of the light source
and the vector from the vertex to the viewpoint. This yields
results that are visually similar to but often more realistic
than the existing lighting model.
Mathematically, the specular component s.n in the existing
lighting model calculation is replaced with the following
alternative calculation.
Given three vectors, n, l, and p, where n is the unit normal
vector at the vertex, l is the unit vector from the vertex to
the light position, and p is the unit vector from the vertex
to the viewpoint (or the vector {0,0,1} if
LIGHT_MODEL_LOCAL_VIEWER is false), the specular component is
given by
(2 * cross(n, cross(n, l)) + l) . p
All other lighting model and material parameters (shininess,
spotlight, attenuation, local viewer, and direction/positional
sources) operate normally. The specular vector lighting model
affects both rgba and index modes.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/APPLE/specular_vector.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.APPLE.specular_vector import *
from OpenGL.raw.GL.APPLE.specular_vector import _EXTENSION_NAME
def glInitSpecularVectorAPPLE():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
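# Illustrative numeric check of the spec's formula (a sketch, not part of the
# autogenerated module). With unit vectors n (normal), l (to light) and
# p (to viewpoint), the specular term is (2*cross(n, cross(n, l)) + l) . p:
#
# import numpy as np
# n = np.array([0.0, 0.0, 1.0])
# l = np.array([0.0, 0.6, 0.8])
# p = np.array([0.0, 0.0, 1.0])
# spec = np.dot(2.0 * np.cross(n, np.cross(n, l)) + l, p)  # -> 0.8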
|
mit
|
andante20/volatility
|
volatility/plugins/mac/psaux.py
|
45
|
1804
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: [email protected]
@organization:
"""
import volatility.plugins.mac.pstasks as pstasks
class mac_psaux(pstasks.mac_tasks):
""" Prints processes with arguments in user land (**argv) """
def render_text(self, outfd, data):
self.table_header(outfd, [("Pid", "8"),
("Name", "20"),
("Bits", "16"),
("Stack", "#018x"),
("Length", "8"),
("Argc", "8"),
("Arguments", "")])
for proc in data:
self.table_row(outfd,
proc.p_pid,
proc.p_comm,
str(proc.task.map.pmap.pm_task_map or '')[9:],
proc.user_stack,
proc.p_argslen,
proc.p_argc,
proc.get_arguments())
|
gpl-2.0
|
yigitguler/django
|
django/conf/locale/cs/formats.py
|
115
|
1702
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
# ISO formats are kept, with one in first position so it is tried first.
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '04:30:59'
'%H.%M', # '04.30'
'%H:%M', # '04:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
'%d.%m.%Y', # '05.01.2006'
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
'%d. %m. %Y', # '05. 01. 2006'
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
bsd-3-clause
|
jazzk/I9505GUEUDNL3
|
tools/perf/scripts/python/failed-syscalls-by-pid.py
|
11180
|
2058
|
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
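#
# Example session (hypothetical trace and comm names):
#   perf record -e raw_syscalls:sys_exit -a -- sleep 10
#   perf script -s failed-syscalls-by-pid.py firefox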
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
	except ValueError:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
|
gpl-2.0
|
minatorii/loan-and-debt-management-system
|
node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
|
1355
|
44604
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""cmake output module
This module is under development and should be considered experimental.
This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
created for each configuration.
This module's original purpose was to support editing in IDEs like KDevelop
which use CMake for project management. It is also possible to use CMake to
generate projects for other IDEs such as Eclipse CDT and Code::Blocks. QtCreator
will convert the CMakeLists.txt to a Code::Blocks .cbp for the editor to read,
but build using CMake. As a result the QtCreator editor is unaware of compiler
defines. The generated CMakeLists.txt can also be used to build on Linux. There
is currently no support for building on platforms other than Linux.
The generated CMakeLists.txt should properly compile all projects. However,
there is a mismatch between gyp and cmake with regard to linking. All attempts
are made to work around this, but CMake sometimes sees -Wl,--start-group as a
library and incorrectly repeats it. As a result the output of this generator
should not be relied on for building.
When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
import multiprocessing
import os
import signal
import string
import subprocess
import gyp.common
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_PREFIX': 'lib',
'SHARED_LIB_SUFFIX': '.so',
'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
'LIB_DIR': '${obj}.${TOOLSET}',
'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
'PRODUCT_DIR': '${builddir}',
'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
'CONFIGURATION_NAME': '${configuration}',
}
FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = True
COMPILABLE_EXTENSIONS = {
'.c': 'cc',
'.cc': 'cxx',
'.cpp': 'cxx',
'.cxx': 'cxx',
'.s': 's', # cc
'.S': 's', # cc
}
def RemovePrefix(a, prefix):
"""Returns 'a' without 'prefix' if it starts with 'prefix'."""
return a[len(prefix):] if a.startswith(prefix) else a
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
default_variables.setdefault('OS', gyp.common.GetFlavor(params))
def Compilable(filename):
"""Return true if the file is compilable (should be in OBJS)."""
return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
def Linkable(filename):
"""Return true if the file is linkable (should be on the link line)."""
return filename.endswith('.o')
def NormjoinPathForceCMakeSource(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
If rel_path is an absolute path it is returned unchanged.
Otherwise it is resolved against base_path and normalized.
If the result is a relative path, it is forced to be relative to the
CMakeLists.txt.
"""
if os.path.isabs(rel_path):
return rel_path
if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
# TODO: do we need to check base_path for absolute variables as well?
return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
os.path.normpath(os.path.join(base_path, rel_path)))
def NormjoinPath(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
TODO: what is this really used for?
If rel_path begins with '$' it is returned unchanged.
Otherwise it is resolved against base_path if relative, then normalized.
"""
if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
return rel_path
return os.path.normpath(os.path.join(base_path, rel_path))
def CMakeStringEscape(a):
"""Escapes the string 'a' for use inside a CMake string.
This means escaping
'\' otherwise it may be seen as modifying the next character
'"' otherwise it will end the string
';' otherwise the string becomes a list
The following do not need to be escaped
'#' when the lexer is in string state, this does not start a comment
The following are yet unknown
'$' generator variables (like ${obj}) must not be escaped,
but text $ should be escaped
what is wanted is to know which $ come from generator variables
"""
return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
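# Illustration (hypothetical value): for the literal input  a;b "c" \d  the
# function returns the literal text  a\;b \"c\" \\d  which CMake reads back
# as the original string.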
def SetFileProperty(output, source_name, property_name, values, sep):
"""Given a set of source file, sets the given property on them."""
output.write('set_source_files_properties(')
output.write(source_name)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetFilesProperty(output, variable, property_name, values, sep):
"""Given a set of source files, sets the given property on them."""
output.write('set_source_files_properties(')
WriteVariable(output, variable)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetTargetProperty(output, target_name, property_name, values, sep=''):
"""Given a target, sets the given property."""
output.write('set_target_properties(')
output.write(target_name)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetVariable(output, variable_name, value):
"""Sets a CMake variable."""
output.write('set(')
output.write(variable_name)
output.write(' "')
output.write(CMakeStringEscape(value))
output.write('")\n')
def SetVariableList(output, variable_name, values):
"""Sets a CMake variable to a list."""
if not values:
return SetVariable(output, variable_name, "")
if len(values) == 1:
return SetVariable(output, variable_name, values[0])
output.write('list(APPEND ')
output.write(variable_name)
output.write('\n "')
output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
output.write('")\n')
def UnsetVariable(output, variable_name):
"""Unsets a CMake variable."""
output.write('unset(')
output.write(variable_name)
output.write(')\n')
def WriteVariable(output, variable_name, prepend=None):
if prepend:
output.write(prepend)
output.write('${')
output.write(variable_name)
output.write('}')
class CMakeTargetType(object):
def __init__(self, command, modifier, property_modifier):
self.command = command
self.modifier = modifier
self.property_modifier = property_modifier
cmake_target_type_from_gyp_target_type = {
'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
}
def StringToCMakeTargetName(a):
"""Converts the given string 'a' to a valid CMake target name.
All invalid characters are replaced by '_'.
Invalid for cmake: ' ', '/', '(', ')', '"'
Invalid for make: ':'
Invalid for unknown reasons but cause failures: '.'
"""
return a.translate(string.maketrans(' /():."', '_______'))
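# Illustration (hypothetical value): StringToCMakeTargetName('out/foo.gyp:foo (host)')
# returns 'out_foo_gyp_foo__host_'.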
def WriteActions(target_name, actions, extra_sources, extra_deps,
path_to_gyp, output):
"""Write CMake for the 'actions' in the target.
Args:
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
for action in actions:
action_name = StringToCMakeTargetName(action['action_name'])
action_target_name = '%s__%s' % (target_name, action_name)
inputs = action['inputs']
inputs_name = action_target_name + '__input'
SetVariableList(output, inputs_name,
[NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
outputs = action['outputs']
cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
for out in outputs]
outputs_name = action_target_name + '__output'
SetVariableList(output, outputs_name, cmake_outputs)
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources.extend(zip(cmake_outputs, outputs))
# add_custom_command
output.write('add_custom_command(OUTPUT ')
WriteVariable(output, outputs_name)
output.write('\n')
if len(dirs) > 0:
for directory in dirs:
output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
output.write(directory)
output.write('\n')
output.write(' COMMAND ')
output.write(gyp.common.EncodePOSIXShellList(action['action']))
output.write('\n')
output.write(' DEPENDS ')
WriteVariable(output, inputs_name)
output.write('\n')
output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write(' COMMENT ')
if 'message' in action:
output.write(action['message'])
else:
output.write(action_target_name)
output.write('\n')
output.write(' VERBATIM\n')
output.write(')\n')
# add_custom_target
output.write('add_custom_target(')
output.write(action_target_name)
output.write('\n DEPENDS ')
WriteVariable(output, outputs_name)
output.write('\n SOURCES ')
WriteVariable(output, inputs_name)
output.write('\n)\n')
extra_deps.append(action_target_name)
def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
return NormjoinPathForceCMakeSource(base_path, rel_path)
def WriteRules(target_name, rules, extra_sources, extra_deps,
path_to_gyp, output):
"""Write CMake for the 'rules' in the target.
Args:
target_name: the name of the CMake target being generated.
    rules: the Gyp 'rules' dict for this target.
    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
for rule in rules:
rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
inputs = rule.get('inputs', [])
inputs_name = rule_name + '__input'
SetVariableList(output, inputs_name,
[NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
outputs = rule['outputs']
var_outputs = []
for count, rule_source in enumerate(rule.get('rule_sources', [])):
action_name = rule_name + '_' + str(count)
rule_source_dirname, rule_source_basename = os.path.split(rule_source)
rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
SetVariable(output, 'RULE_INPUT_PATH', rule_source)
SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
# Create variables for the output, as 'local' variable will be unset.
these_outputs = []
for output_index, out in enumerate(outputs):
output_name = action_name + '_' + str(output_index)
SetVariable(output, output_name,
NormjoinRulePathForceCMakeSource(path_to_gyp, out,
rule_source))
if int(rule.get('process_outputs_as_sources', False)):
extra_sources.append(('${' + output_name + '}', out))
these_outputs.append('${' + output_name + '}')
var_outputs.append('${' + output_name + '}')
# add_custom_command
output.write('add_custom_command(OUTPUT\n')
for out in these_outputs:
output.write(' ')
output.write(out)
output.write('\n')
for directory in dirs:
output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
output.write(directory)
output.write('\n')
output.write(' COMMAND ')
output.write(gyp.common.EncodePOSIXShellList(rule['action']))
output.write('\n')
output.write(' DEPENDS ')
WriteVariable(output, inputs_name)
output.write(' ')
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
# CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
# The cwd is the current build directory.
output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write(' COMMENT ')
if 'message' in rule:
output.write(rule['message'])
else:
output.write(action_name)
output.write('\n')
output.write(' VERBATIM\n')
output.write(')\n')
UnsetVariable(output, 'RULE_INPUT_PATH')
UnsetVariable(output, 'RULE_INPUT_DIRNAME')
UnsetVariable(output, 'RULE_INPUT_NAME')
UnsetVariable(output, 'RULE_INPUT_ROOT')
UnsetVariable(output, 'RULE_INPUT_EXT')
# add_custom_target
output.write('add_custom_target(')
output.write(rule_name)
output.write(' DEPENDS\n')
for out in var_outputs:
output.write(' ')
output.write(out)
output.write('\n')
output.write('SOURCES ')
WriteVariable(output, inputs_name)
output.write('\n')
for rule_source in rule.get('rule_sources', []):
output.write(' ')
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
output.write(')\n')
extra_deps.append(rule_name)
def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
"""Write CMake for the 'copies' in the target.
Args:
target_name: the name of the CMake target being generated.
    copies: the Gyp 'copies' list for this target.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
copy_name = target_name + '__copies'
# CMake gets upset with custom targets with OUTPUT which specify no output.
have_copies = any(copy['files'] for copy in copies)
if not have_copies:
output.write('add_custom_target(')
output.write(copy_name)
output.write(')\n')
extra_deps.append(copy_name)
return
class Copy(object):
def __init__(self, ext, command):
self.cmake_inputs = []
self.cmake_outputs = []
self.gyp_inputs = []
self.gyp_outputs = []
self.ext = ext
self.inputs_name = None
self.outputs_name = None
self.command = command
file_copy = Copy('', 'copy')
dir_copy = Copy('_dirs', 'copy_directory')
for copy in copies:
files = copy['files']
destination = copy['destination']
for src in files:
path = os.path.normpath(src)
basename = os.path.split(path)[1]
dst = os.path.join(destination, basename)
copy = file_copy if os.path.basename(src) else dir_copy
copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
copy.gyp_inputs.append(src)
copy.gyp_outputs.append(dst)
for copy in (file_copy, dir_copy):
if copy.cmake_inputs:
copy.inputs_name = copy_name + '__input' + copy.ext
SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
copy.outputs_name = copy_name + '__output' + copy.ext
SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
# add_custom_command
output.write('add_custom_command(\n')
output.write('OUTPUT')
for copy in (file_copy, dir_copy):
if copy.outputs_name:
WriteVariable(output, copy.outputs_name, ' ')
output.write('\n')
for copy in (file_copy, dir_copy):
for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
# 'cmake -E copy src dst' will create the 'dst' directory if needed.
output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
output.write(src)
output.write(' ')
output.write(dst)
output.write("\n")
output.write('DEPENDS')
for copy in (file_copy, dir_copy):
if copy.inputs_name:
WriteVariable(output, copy.inputs_name, ' ')
output.write('\n')
output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write('COMMENT Copying for ')
output.write(target_name)
output.write('\n')
output.write('VERBATIM\n')
output.write(')\n')
# add_custom_target
output.write('add_custom_target(')
output.write(copy_name)
output.write('\n DEPENDS')
for copy in (file_copy, dir_copy):
if copy.outputs_name:
WriteVariable(output, copy.outputs_name, ' ')
output.write('\n SOURCES')
if file_copy.inputs_name:
WriteVariable(output, file_copy.inputs_name, ' ')
output.write('\n)\n')
extra_deps.append(copy_name)
def CreateCMakeTargetBaseName(qualified_target):
"""This is the name we would like the target to have."""
_, gyp_target_name, gyp_target_toolset = (
gyp.common.ParseQualifiedTarget(qualified_target))
cmake_target_base_name = gyp_target_name
if gyp_target_toolset and gyp_target_toolset != 'target':
cmake_target_base_name += '_' + gyp_target_toolset
return StringToCMakeTargetName(cmake_target_base_name)
def CreateCMakeTargetFullName(qualified_target):
"""An unambiguous name for the target."""
gyp_file, gyp_target_name, gyp_target_toolset = (
gyp.common.ParseQualifiedTarget(qualified_target))
cmake_target_full_name = gyp_file + ':' + gyp_target_name
if gyp_target_toolset and gyp_target_toolset != 'target':
cmake_target_full_name += '_' + gyp_target_toolset
return StringToCMakeTargetName(cmake_target_full_name)
class CMakeNamer(object):
"""Converts Gyp target names into CMake target names.
CMake requires that target names be globally unique. One way to ensure
  this is to fully qualify the names of the targets. Unfortunately, this
ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
of just "chrome". If this generator were only interested in building, it
would be possible to fully qualify all target names, then create
unqualified target names which depend on all qualified targets which
should have had that name. This is more or less what the 'make' generator
does with aliases. However, one goal of this generator is to create CMake
files for use with IDEs, and fully qualified names are not as user
friendly.
Since target name collision is rare, we do the above only when required.
Toolset variants are always qualified from the base, as this is required for
building. However, it also makes sense for an IDE, as it is possible for
defines to be different.
"""
def __init__(self, target_list):
    self.cmake_target_base_names_conflicting = set()
cmake_target_base_names_seen = set()
for qualified_target in target_list:
cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
if cmake_target_base_name not in cmake_target_base_names_seen:
cmake_target_base_names_seen.add(cmake_target_base_name)
else:
        self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
def CreateCMakeTargetName(self, qualified_target):
base_name = CreateCMakeTargetBaseName(qualified_target)
    if base_name in self.cmake_target_base_names_conflicting:
return CreateCMakeTargetFullName(qualified_target)
return base_name
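# Illustration (hypothetical targets): 'chrome/chrome.gyp:chrome' is given the
# base name 'chrome'; only if another .gyp file also defines a 'chrome' target
# do both fall back to full names such as 'chrome_chrome_gyp_chrome'.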
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
options, generator_flags, all_qualified_targets, output):
# The make generator does this always.
# TODO: It would be nice to be able to tell CMake all dependencies.
circular_libs = generator_flags.get('circular', True)
if not generator_flags.get('standalone', False):
output.write('\n#')
output.write(qualified_target)
output.write('\n')
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
rel_gyp_dir = os.path.dirname(rel_gyp_file)
# Relative path from build dir to top dir.
build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
# Relative path from build dir to gyp dir.
build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
path_from_cmakelists_to_gyp = build_to_gyp
spec = target_dicts.get(qualified_target, {})
config = spec.get('configurations', {}).get(config_to_use, {})
target_name = spec.get('target_name', '<missing target name>')
target_type = spec.get('type', '<missing target type>')
target_toolset = spec.get('toolset')
cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
if cmake_target_type is None:
    print('Target %s has unknown target type %s, skipping.' %
          (target_name, target_type))
return
SetVariable(output, 'TARGET', target_name)
SetVariable(output, 'TOOLSET', target_toolset)
cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
extra_sources = []
extra_deps = []
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
path_from_cmakelists_to_gyp, output)
# Rules must be early like actions.
if 'rules' in spec:
WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
path_from_cmakelists_to_gyp, output)
# Copies
if 'copies' in spec:
WriteCopies(cmake_target_name, spec['copies'], extra_deps,
path_from_cmakelists_to_gyp, output)
# Target and sources
srcs = spec.get('sources', [])
# Gyp separates the sheep from the goats based on file extensions.
# A full separation is done here because of flag handing (see below).
s_sources = []
c_sources = []
cxx_sources = []
linkable_sources = []
other_sources = []
for src in srcs:
_, ext = os.path.splitext(src)
src_type = COMPILABLE_EXTENSIONS.get(ext, None)
    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
if src_type == 's':
s_sources.append(src_norm_path)
elif src_type == 'cc':
c_sources.append(src_norm_path)
elif src_type == 'cxx':
cxx_sources.append(src_norm_path)
elif Linkable(ext):
linkable_sources.append(src_norm_path)
else:
other_sources.append(src_norm_path)
for extra_source in extra_sources:
src, real_source = extra_source
_, ext = os.path.splitext(real_source)
src_type = COMPILABLE_EXTENSIONS.get(ext, None)
if src_type == 's':
s_sources.append(src)
elif src_type == 'cc':
c_sources.append(src)
elif src_type == 'cxx':
cxx_sources.append(src)
elif Linkable(ext):
linkable_sources.append(src)
else:
other_sources.append(src)
s_sources_name = None
if s_sources:
s_sources_name = cmake_target_name + '__asm_srcs'
SetVariableList(output, s_sources_name, s_sources)
c_sources_name = None
if c_sources:
c_sources_name = cmake_target_name + '__c_srcs'
SetVariableList(output, c_sources_name, c_sources)
cxx_sources_name = None
if cxx_sources:
cxx_sources_name = cmake_target_name + '__cxx_srcs'
SetVariableList(output, cxx_sources_name, cxx_sources)
linkable_sources_name = None
if linkable_sources:
linkable_sources_name = cmake_target_name + '__linkable_srcs'
SetVariableList(output, linkable_sources_name, linkable_sources)
other_sources_name = None
if other_sources:
other_sources_name = cmake_target_name + '__other_srcs'
SetVariableList(output, other_sources_name, other_sources)
# CMake gets upset when executable targets provide no sources.
# http://www.cmake.org/pipermail/cmake/2010-July/038461.html
dummy_sources_name = None
has_sources = (s_sources_name or
c_sources_name or
cxx_sources_name or
linkable_sources_name or
other_sources_name)
if target_type == 'executable' and not has_sources:
dummy_sources_name = cmake_target_name + '__dummy_srcs'
SetVariable(output, dummy_sources_name,
"${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
output.write('if(NOT EXISTS "')
WriteVariable(output, dummy_sources_name)
output.write('")\n')
output.write(' file(WRITE "')
WriteVariable(output, dummy_sources_name)
output.write('" "")\n')
output.write("endif()\n")
# CMake is opposed to setting linker directories and considers the practice
# of setting linker directories dangerous. Instead, it favors the use of
# find_library and passing absolute paths to target_link_libraries.
# However, CMake does provide the command link_directories, which adds
# link directories to targets defined after it is called.
# As a result, link_directories must come before the target definition.
# CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
library_dirs = config.get('library_dirs')
if library_dirs is not None:
output.write('link_directories(')
for library_dir in library_dirs:
output.write(' ')
output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
output.write('\n')
output.write(')\n')
output.write(cmake_target_type.command)
output.write('(')
output.write(cmake_target_name)
if cmake_target_type.modifier is not None:
output.write(' ')
output.write(cmake_target_type.modifier)
if s_sources_name:
WriteVariable(output, s_sources_name, ' ')
if c_sources_name:
WriteVariable(output, c_sources_name, ' ')
if cxx_sources_name:
WriteVariable(output, cxx_sources_name, ' ')
if linkable_sources_name:
WriteVariable(output, linkable_sources_name, ' ')
if other_sources_name:
WriteVariable(output, other_sources_name, ' ')
if dummy_sources_name:
WriteVariable(output, dummy_sources_name, ' ')
output.write(')\n')
# Let CMake know if the 'all' target should depend on this target.
exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
else 'FALSE')
SetTargetProperty(output, cmake_target_name,
'EXCLUDE_FROM_ALL', exclude_from_all)
for extra_target_name in extra_deps:
SetTargetProperty(output, extra_target_name,
'EXCLUDE_FROM_ALL', exclude_from_all)
# Output name and location.
if target_type != 'none':
# Link as 'C' if there are no other files
if not c_sources and not cxx_sources:
SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
# Mark uncompiled sources as uncompiled.
if other_sources_name:
output.write('set_source_files_properties(')
WriteVariable(output, other_sources_name, '')
output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
# Mark object sources as linkable.
if linkable_sources_name:
output.write('set_source_files_properties(')
      WriteVariable(output, linkable_sources_name, '')
output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
# Output directory
target_output_directory = spec.get('product_dir')
if target_output_directory is None:
if target_type in ('executable', 'loadable_module'):
target_output_directory = generator_default_variables['PRODUCT_DIR']
elif target_type == 'shared_library':
target_output_directory = '${builddir}/lib.${TOOLSET}'
elif spec.get('standalone_static_library', False):
target_output_directory = generator_default_variables['PRODUCT_DIR']
else:
base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
options.toplevel_dir)
target_output_directory = '${obj}.${TOOLSET}'
target_output_directory = (
os.path.join(target_output_directory, base_path))
cmake_target_output_directory = NormjoinPathForceCMakeSource(
path_from_cmakelists_to_gyp,
target_output_directory)
SetTargetProperty(output,
cmake_target_name,
cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
cmake_target_output_directory)
# Output name
default_product_prefix = ''
default_product_name = target_name
default_product_ext = ''
if target_type == 'static_library':
static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
default_product_name = RemovePrefix(default_product_name,
static_library_prefix)
default_product_prefix = static_library_prefix
default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
elif target_type in ('loadable_module', 'shared_library'):
shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
default_product_name = RemovePrefix(default_product_name,
shared_library_prefix)
default_product_prefix = shared_library_prefix
default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
elif target_type != 'executable':
print ('ERROR: What output file should be generated?',
'type', target_type, 'target', target_name)
product_prefix = spec.get('product_prefix', default_product_prefix)
product_name = spec.get('product_name', default_product_name)
product_ext = spec.get('product_extension')
if product_ext:
product_ext = '.' + product_ext
else:
product_ext = default_product_ext
SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
SetTargetProperty(output, cmake_target_name,
cmake_target_type.property_modifier + '_OUTPUT_NAME',
product_name)
SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
# Make the output of this target referenceable as a source.
cmake_target_output_basename = product_prefix + product_name + product_ext
cmake_target_output = os.path.join(cmake_target_output_directory,
cmake_target_output_basename)
SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
# Includes
includes = config.get('include_dirs')
if includes:
# This (target include directories) is what requires CMake 2.8.8
includes_name = cmake_target_name + '__include_dirs'
SetVariableList(output, includes_name,
[NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
for include in includes])
output.write('set_property(TARGET ')
output.write(cmake_target_name)
output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
WriteVariable(output, includes_name, '')
output.write(')\n')
# Defines
defines = config.get('defines')
if defines is not None:
SetTargetProperty(output,
cmake_target_name,
'COMPILE_DEFINITIONS',
defines,
';')
# Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
# CMake currently does not have target C and CXX flags.
# So, instead of doing...
# cflags_c = config.get('cflags_c')
# if cflags_c is not None:
# SetTargetProperty(output, cmake_target_name,
# 'C_COMPILE_FLAGS', cflags_c, ' ')
# cflags_cc = config.get('cflags_cc')
# if cflags_cc is not None:
# SetTargetProperty(output, cmake_target_name,
# 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
# Instead we must...
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cxx = config.get('cflags_cc', [])
if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
elif c_sources and not (s_sources or cxx_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_c)
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
elif cxx_sources and not (s_sources or c_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_cxx)
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
else:
# TODO: This is broken, one cannot generally set properties on files,
# as other targets may require different properties on the same files.
if s_sources and cflags:
SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
if c_sources and (cflags or cflags_c):
flags = []
flags.extend(cflags)
flags.extend(cflags_c)
SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
if cxx_sources and (cflags or cflags_cxx):
flags = []
flags.extend(cflags)
flags.extend(cflags_cxx)
SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
# Linker flags
ldflags = config.get('ldflags')
if ldflags is not None:
SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
# Gyp does not retain or enforce specifying enough information to do so.
# So do as other gyp generators and use --start-group and --end-group.
# Give CMake as little information as possible so that it doesn't mess it up.
# Dependencies
rawDeps = spec.get('dependencies', [])
static_deps = []
shared_deps = []
other_deps = []
for rawDep in rawDeps:
dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
dep_spec = target_dicts.get(rawDep, {})
dep_target_type = dep_spec.get('type', None)
if dep_target_type == 'static_library':
static_deps.append(dep_cmake_name)
elif dep_target_type == 'shared_library':
shared_deps.append(dep_cmake_name)
else:
other_deps.append(dep_cmake_name)
# ensure all external dependencies are complete before internal dependencies
# extra_deps currently only depend on their own deps, so otherwise run early
if static_deps or shared_deps or other_deps:
for extra_dep in extra_deps:
output.write('add_dependencies(')
output.write(extra_dep)
output.write('\n')
for deps in (static_deps, shared_deps, other_deps):
for dep in gyp.common.uniquer(deps):
output.write(' ')
output.write(dep)
output.write('\n')
output.write(')\n')
linkable = target_type in ('executable', 'loadable_module', 'shared_library')
other_deps.extend(extra_deps)
if other_deps or (not linkable and (static_deps or shared_deps)):
output.write('add_dependencies(')
output.write(cmake_target_name)
output.write('\n')
for dep in gyp.common.uniquer(other_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if not linkable:
for deps in (static_deps, shared_deps):
for lib_dep in gyp.common.uniquer(deps):
output.write(' ')
output.write(lib_dep)
output.write('\n')
output.write(')\n')
# Libraries
if linkable:
external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
if external_libs or static_deps or shared_deps:
output.write('target_link_libraries(')
output.write(cmake_target_name)
output.write('\n')
if static_deps:
write_group = circular_libs and len(static_deps) > 1
if write_group:
output.write('-Wl,--start-group\n')
for dep in gyp.common.uniquer(static_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if write_group:
output.write('-Wl,--end-group\n')
if shared_deps:
for dep in gyp.common.uniquer(shared_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if external_libs:
for lib in gyp.common.uniquer(external_libs):
output.write(' ')
output.write(lib)
output.write('\n')
output.write(')\n')
UnsetVariable(output, 'TOOLSET')
UnsetVariable(output, 'TARGET')
def GenerateOutputForConfig(target_list, target_dicts, data,
params, config_to_use):
options = params['options']
generator_flags = params['generator_flags']
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
# Each Gyp configuration creates a different CMakeLists.txt file
# to avoid incompatibilities between Gyp and CMake configurations.
generator_dir = os.path.relpath(options.generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(os.path.join(generator_dir,
output_dir,
config_to_use))
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
gyp.common.EnsureDirExists(output_file)
output = open(output_file, 'w')
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
output.write('cmake_policy(VERSION 2.8.8)\n')
gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
output.write('project(')
output.write(project_target)
output.write(')\n')
SetVariable(output, 'configuration', config_to_use)
ar = None
cc = None
cxx = None
make_global_settings = data[gyp_file].get('make_global_settings', [])
build_to_top = gyp.common.InvertRelativePath(build_dir,
options.toplevel_dir)
for key, value in make_global_settings:
if key == 'AR':
ar = os.path.join(build_to_top, value)
if key == 'CC':
cc = os.path.join(build_to_top, value)
if key == 'CXX':
cxx = os.path.join(build_to_top, value)
ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
if ar:
SetVariable(output, 'CMAKE_AR', ar)
if cc:
SetVariable(output, 'CMAKE_C_COMPILER', cc)
if cxx:
SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
# The following appears to be as-yet undocumented.
# http://public.kitware.com/Bug/view.php?id=8392
output.write('enable_language(ASM)\n')
# ASM-ATT does not support .S files.
# output.write('enable_language(ASM-ATT)\n')
if cc:
SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
SetVariable(output, 'obj', '${builddir}/obj')
output.write('\n')
# TODO: Undocumented/unsupported (the CMake Java generator depends on it).
# CMake by default names the object resulting from foo.c to be foo.c.o.
# Gyp traditionally names the object resulting from foo.c foo.o.
# This should be irrelevant, but some targets extract .o files from .a
# and depend on the name of the extracted .o files.
output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
output.write('\n')
# Force ninja to use rsp files. Otherwise link and ar lines can get too long,
# resulting in 'Argument list too long' errors.
output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
output.write('\n')
namer = CMakeNamer(target_list)
# The list of targets upon which the 'all' target should depend.
  # CMake has its own implicit 'all' target; one is not created explicitly.
all_qualified_targets = set()
for build_file in params['build_files']:
for qualified_target in gyp.common.AllTargets(target_list,
target_dicts,
os.path.normpath(build_file)):
all_qualified_targets.add(qualified_target)
for qualified_target in target_list:
WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
options, generator_flags, all_qualified_targets, output)
output.close()
def PerformBuild(data, configurations, params):
options = params['options']
generator_flags = params['generator_flags']
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
generator_dir = os.path.relpath(options.generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
for config_name in configurations:
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(os.path.join(generator_dir,
output_dir,
config_name))
arguments = ['cmake', '-G', 'Ninja']
print 'Generating [%s]: %s' % (config_name, arguments)
subprocess.check_call(arguments, cwd=build_dir)
arguments = ['ninja', '-C', build_dir]
print 'Building [%s]: %s' % (config_name, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
# Ignore the interrupt signal so that the parent process catches it and
# kills all multiprocessing children.
signal.signal(signal.SIGINT, signal.SIG_IGN)
target_list, target_dicts, data, params, config_name = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data,
params, user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append((target_list, target_dicts, data,
params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data,
params, config_name)
|
mit
|
vivilyu/android_kernel_huawei_c8813
|
tools/perf/scripts/python/sched-migration.py
|
11215
|
11670
|
#!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
		ret += self.event.__repr__()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
	def __init__(self, arg = None):
		self.data = arg if arg is not None else []
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
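		# Binary search over the ordered, non-overlapping slices: returns the
		# index of the slice containing ts, or -1 if no slice covers it.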
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
|
gpl-2.0
|
fzadow/CATMAID
|
django/applications/catmaid/control/flytem/models.py
|
2
|
3183
|
import json
import urllib2
from django.conf import settings
class FlyTEMDimension:
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
class FlyTEMProject:
def __init__(self, id):
self.id = id
self.title = id
class FlyTEMStack:
def __init__(self, project_id, stack_id):
self.project = project_id
self.id = stack_id
self.title = stack_id
self.image_base = '%s/project/%s/stack/%s/' % (settings.FLYTEM_SERVICE_URL, project_id, stack_id)
self.num_zoom_levels = -1
self.file_extension = 'jpg'
r = settings.FLYTEM_STACK_RESOLUTION
self.resolution = FlyTEMDimension(r[0], r[1], r[2])
self.tile_source_type = 7
self.tile_width = settings.FLYTEM_STACK_TILE_WIDTH
self.tile_height = settings.FLYTEM_STACK_TILE_HEIGHT
self.metadata = ''
self.trakem2_project = False
try:
url = '%s/project/%s/stack/%s/bounds' % (settings.FLYTEM_SERVICE_URL, project_id, stack_id)
bounds_json = urllib2.urlopen(url).read()
        except urllib2.URLError:
            # HTTPError is a subclass of URLError, so one handler covers both.
            raise ValueError("Couldn't retrieve FlyTEM project information from %s" % url)
bounds_json = json.loads(bounds_json)
try:
url = '%s/project/%s/stack/%s/zValues' % (settings.FLYTEM_SERVICE_URL, project_id, stack_id)
zvalues_json = urllib2.urlopen(url).read()
        except urllib2.URLError:
            # HTTPError is a subclass of URLError, so one handler covers both.
            raise ValueError("Couldn't retrieve FlyTEM project information from %s" % url)
zvalues_json = json.loads(zvalues_json)
zvalues = [int(v) for v in zvalues_json]
zvalues.sort()
# Dimensions
width = int(bounds_json['maxX'])
height = int(bounds_json['maxY'])
depth = zvalues[-1] + 1
self.dimension = FlyTEMDimension(width, height, depth)
# Broken slices
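        # Example: z values [0, 1, 4] yield broken slices [2, 3].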
self.broken_slices = []
last = -1
for i in zvalues:
for j in range(last + 1, i):
self.broken_slices.append(j)
last = i
class FlyTEMProjectStacks:
def __init__(self):
try:
url = '%s/stackIds' % settings.FLYTEM_SERVICE_URL
project_stacks_json = urllib2.urlopen(url).read()
        except urllib2.URLError:
            # HTTPError is a subclass of URLError, so one handler covers both.
            raise ValueError("Couldn't retrieve FlyTEM project information from %s" % url)
self.data = json.loads(project_stacks_json)
# Default to XY orientation
self.orientation = 0
# Default to no translation
self.translation = FlyTEMDimension(0, 0, 0)
def get_stack(self, project_id, stack_id):
return FlyTEMStack(project_id, stack_id)
def get_project(self, id):
return FlyTEMProject(id)
|
agpl-3.0
|
trnewman/VT-USRP-daughterboard-drivers
|
gr-atsc/src/python/fpll.py
|
15
|
2452
|
#!/usr/bin/env python
#
# Copyright 2004,2005 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
from gnuradio import gr, atsc
import math, os
def main():
print os.getpid()
tb = gr.top_block()
u = gr.file_source(gr.sizeof_float,"/tmp/atsc_pipe_2")
input_rate = 19.2e6
IF_freq = 5.75e6
# 1/2 as wide because we're designing lp filter
symbol_rate = atsc.ATSC_SYMBOL_RATE/2.
NTAPS = 279
tt = gr.firdes.root_raised_cosine (1.0, input_rate, symbol_rate, .115, NTAPS)
# heterodyne the low pass coefficients up to the specified bandpass
# center frequency. Note that when we do this, the filter bandwidth
# is effectively twice the low pass (2.69 * 2 = 5.38) and hence
# matches the diagram in the ATSC spec.
arg = 2. * math.pi * IF_freq / input_rate
t=[]
for i in range(len(tt)):
t += [tt[i] * 2. * math.cos(arg * i)]
rrc = gr.fir_filter_fff(1, t)
fpll = atsc.fpll()
pilot_freq = IF_freq - 3e6 + 0.31e6
lower_edge = 6e6 - 0.31e6
upper_edge = IF_freq - 3e6 + pilot_freq
transition_width = upper_edge - lower_edge
lp_coeffs = gr.firdes.low_pass (1.0,
input_rate,
(lower_edge + upper_edge) * 0.5,
transition_width,
gr.firdes.WIN_HAMMING)
lp_filter = gr.fir_filter_fff (1,lp_coeffs)
alpha = 1e-5
iir = gr.single_pole_iir_filter_ff(alpha)
remove_dc = gr.sub_ff()
out = gr.file_sink(gr.sizeof_float,"/tmp/atsc_pipe_3")
# out = gr.file_sink(gr.sizeof_float,"/mnt/sata/atsc_data_float")
tb.connect(u, fpll, lp_filter)
tb.connect(lp_filter, iir)
tb.connect(lp_filter, (remove_dc,0))
tb.connect(iir, (remove_dc,1))
tb.connect(remove_dc, out)
tb.run()
if __name__ == '__main__':
main()
|
gpl-3.0
|
dhenrygithub/QGIS
|
python/plugins/processing/algs/grass7/ext/r_li_edgedensity.py
|
2
|
1289
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_edgedensity.py
-------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from r_li import checkMovingWindow, configFile
def checkParameterValuesBeforeExecuting(alg):
checkMovingWindow(alg)
def processCommand(alg):
configFile(alg)
|
gpl-2.0
|
WindCanDie/spark
|
examples/src/main/python/ml/imputer_example.py
|
79
|
1513
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An example demonstrating Imputer.
Run with:
bin/spark-submit examples/src/main/python/ml/imputer_example.py
"""
# $example on$
from pyspark.ml.feature import Imputer
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("ImputerExample")\
.getOrCreate()
# $example on$
df = spark.createDataFrame([
(1.0, float("nan")),
(2.0, float("nan")),
(float("nan"), 3.0),
(4.0, 4.0),
(5.0, 5.0)
], ["a", "b"])
imputer = Imputer(inputCols=["a", "b"], outputCols=["out_a", "out_b"])
model = imputer.fit(df)
model.transform(df).show()
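# With the Imputer's default "mean" strategy, the NaN entries above are
# replaced by the per-column means of the non-missing values, so out_a
# imputes 3.0 and out_b imputes 4.0.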
# $example off$
spark.stop()
|
apache-2.0
|
stuckj/dupeguru
|
cocoa/base/ui/main_menu.py
|
1
|
4535
|
ownerclass = 'AppDelegateBase'
ownerimport = 'AppDelegateBase.h'
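# Note that this script is not standalone Python: names such as Menu, Action,
# NSApp, owner and args are injected by dupeGuru's UI-generation tooling
# (presumably xibless) when the menu definition is processed.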
edition = args.get('edition', 'se')
result = Menu("")
appMenu = result.addMenu("dupeGuru")
fileMenu = result.addMenu("File")
editMenu = result.addMenu("Edit")
actionMenu = result.addMenu("Actions")
owner.columnsMenu = result.addMenu("Columns")
modeMenu = result.addMenu("Mode")
windowMenu = result.addMenu("Window")
helpMenu = result.addMenu("Help")
appMenu.addItem("About dupeGuru", Action(owner, 'showAboutBox'))
appMenu.addItem("Check for update...", Action(owner.updater, 'checkForUpdates:'))
appMenu.addSeparator()
appMenu.addItem("Preferences...", Action(owner, 'showPreferencesPanel'), 'cmd+,')
appMenu.addSeparator()
NSApp.servicesMenu = appMenu.addMenu("Services")
appMenu.addSeparator()
appMenu.addItem("Hide dupeGuru", Action(NSApp, 'hide:'), 'cmd+h')
appMenu.addItem("Hide Others", Action(NSApp, 'hideOtherApplications:'), 'cmd+alt+h')
appMenu.addItem("Show All", Action(NSApp, 'unhideAllApplications:'))
appMenu.addSeparator()
appMenu.addItem("Quit dupeGuru", Action(NSApp, 'terminate:'), 'cmd+q')
fileMenu.addItem("Load Results...", Action(None, 'loadResults'), 'cmd+o')
owner.recentResultsMenu = fileMenu.addMenu("Load Recent Results")
fileMenu.addItem("Save Results...", Action(None, 'saveResults'), 'cmd+s')
fileMenu.addItem("Export Results to XHTML", Action(owner.model, 'exportToXHTML'), 'cmd+shift+e')
fileMenu.addItem("Export Results to CSV", Action(owner.model, 'exportToCSV'))
if edition == 'pe':
fileMenu.addItem("Clear Picture Cache", Action(owner, 'clearPictureCache'), 'cmd+shift+p')
elif edition == 'me':
fileMenu.addItem("Remove Dead Tracks in iTunes", Action(owner, 'removeDeadTracks'))
editMenu.addItem("Mark All", Action(None, 'markAll'), 'cmd+a')
editMenu.addItem("Mark None", Action(None, 'markNone'), 'cmd+shift+a')
editMenu.addItem("Invert Marking", Action(None, 'markInvert'), 'cmd+alt+a')
editMenu.addItem("Mark Selected", Action(None, 'markSelected'), 'ctrl+cmd+a')
editMenu.addSeparator()
editMenu.addItem("Cut", Action(None, 'cut:'), 'cmd+x')
editMenu.addItem("Copy", Action(None, 'copy:'), 'cmd+c')
editMenu.addItem("Paste", Action(None, 'paste:'), 'cmd+v')
editMenu.addSeparator()
editMenu.addItem("Filter Results...", Action(None, 'focusOnFilterField'), 'cmd+alt+f')
actionMenu.addItem("Start Duplicate Scan", Action(owner, 'startScanning'), 'cmd+d')
actionMenu.addSeparator()
actionMenu.addItem("Send Marked to Trash...", Action(None, 'trashMarked'), 'cmd+t')
actionMenu.addItem("Move Marked to...", Action(None, 'moveMarked'), 'cmd+m')
actionMenu.addItem("Copy Marked to...", Action(None, 'copyMarked'), 'cmd+alt+m')
actionMenu.addItem("Remove Marked from Results", Action(None, 'removeMarked'), 'cmd+r')
actionMenu.addItem("Re-Prioritize Results...", Action(None, 'reprioritizeResults'))
actionMenu.addSeparator()
actionMenu.addItem("Remove Selected from Results", Action(None, 'removeSelected'), 'cmd+backspace')
actionMenu.addItem("Add Selected to Ignore List", Action(None, 'ignoreSelected'), 'cmd+g')
actionMenu.addItem("Make Selected into Reference", Action(None, 'switchSelected'), 'cmd+arrowup')
actionMenu.addSeparator()
actionMenu.addItem("Open Selected with Default Application", Action(None, 'openSelected'), 'cmd+return')
actionMenu.addItem("Reveal Selected in Finder", Action(None, 'revealSelected'), 'cmd+alt+return')
actionMenu.addItem("Invoke Custom Command", Action(None, 'invokeCustomCommand'), 'cmd+shift+c')
actionMenu.addItem("Rename Selected", Action(None, 'renameSelected'), 'enter')
modeMenu.addItem("Show Dupes Only", Action(None, 'togglePowerMarker'), 'cmd+1')
modeMenu.addItem("Show Delta Values", Action(None, 'toggleDelta'), 'cmd+2')
windowMenu.addItem("Results Window", Action(owner, 'showResultWindow'))
windowMenu.addItem("Folder Selection Window", Action(owner, 'showDirectoryWindow'))
windowMenu.addItem("Ignore List", Action(owner, 'showIgnoreList'))
windowMenu.addItem("Details Panel", Action(None, 'toggleDetailsPanel'), 'cmd+i')
windowMenu.addItem("Quick Look", Action(None, 'toggleQuicklookPanel'), 'cmd+l')
windowMenu.addSeparator()
windowMenu.addItem("Minimize", Action(None, 'performMinimize:'))
windowMenu.addItem("Zoom", Action(None, 'performZoom:'))
windowMenu.addItem("Close Window", Action(None, 'performClose:'), 'cmd+w')
windowMenu.addSeparator()
windowMenu.addItem("Bring All to Front", Action(None, 'arrangeInFront:'))
helpMenu.addItem("dupeGuru Help", Action(owner, 'openHelp'), 'cmd+?')
helpMenu.addItem("dupeGuru Website", Action(owner, 'openWebsite'))
|
gpl-3.0
|
Smart-Torvy/torvy-home-assistant
|
homeassistant/components/switch/modbus.py
|
6
|
2349
|
"""
Support for Modbus switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.modbus/
"""
import logging
import voluptuous as vol
import homeassistant.components.modbus as modbus
from homeassistant.const import CONF_NAME
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers import config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['modbus']
CONF_COIL = "coil"
CONF_COILS = "coils"
CONF_SLAVE = "slave"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_COILS): [{
vol.Required(CONF_COIL): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_SLAVE): cv.positive_int,
}]
})
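# An illustrative (hypothetical) configuration.yaml entry matching the schema
# above:
#
# switch:
#   platform: modbus
#   coils:
#     - name: Fan
#       slave: 1
#       coil: 0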
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Read configuration and create Modbus devices."""
switches = []
for coil in config.get("coils"):
switches.append(ModbusCoilSwitch(
coil.get(CONF_NAME),
coil.get(CONF_SLAVE),
coil.get(CONF_COIL)))
add_devices(switches)
class ModbusCoilSwitch(ToggleEntity):
"""Representation of a Modbus switch."""
# pylint: disable=too-many-arguments
def __init__(self, name, slave, coil):
"""Initialize the switch."""
self._name = name
self._slave = int(slave) if slave else None
self._coil = int(coil)
self._is_on = None
@property
def is_on(self):
"""Return true if switch is on."""
return self._is_on
@property
def name(self):
"""Return the name of the switch."""
return self._name
def turn_on(self, **kwargs):
"""Set switch on."""
modbus.HUB.write_coil(self._slave, self._coil, True)
def turn_off(self, **kwargs):
"""Set switch off."""
modbus.HUB.write_coil(self._slave, self._coil, False)
def update(self):
"""Update the state of the switch."""
result = modbus.HUB.read_coils(self._slave, self._coil, 1)
if not result:
_LOGGER.error(
'No response from modbus slave %s coil %s',
self._slave,
self._coil)
return
self._is_on = bool(result.bits[0])
|
mit
|
cancro7/gem5
|
ext/ply/test/lex_object.py
|
174
|
1141
|
# -----------------------------------------------------------------------------
# lex_object.py
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
class CalcLexer:
tokens = (
'NAME','NUMBER',
'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
'LPAREN','RPAREN',
)
# Tokens
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_EQUALS = r'='
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
def t_NUMBER(self,t):
r'\d+'
try:
t.value = int(t.value)
except ValueError:
print("Integer value too large %s" % t.value)
t.value = 0
return t
t_ignore = " \t"
def t_newline(self,t):
r'\n+'
t.lineno += t.value.count("\n")
def t_error(self,t):
print("Illegal character '%s'" % t.value[0])
t.lexer.skip(1)
calc = CalcLexer()
# Build the lexer
lex.lex(object=calc)
lex.runmain(data="3+4")
|
bsd-3-clause
|
ee08b397/schematics
|
tests/test_functional.py
|
10
|
2051
|
from schematics.models import Model
from schematics.types import IntType, StringType
from schematics.validate import validate
from schematics.exceptions import ValidationError
def test_validate_simple_dict():
class Player(Model):
id = IntType()
validate(Player, {'id': 4})
def test_validate_keep_context_data():
class Player(Model):
id = IntType()
name = StringType()
p1 = Player({'id': 4})
data = validate(Player, {'name': 'Arthur'}, context=p1._data)
assert data == {'id': 4, 'name': 'Arthur'}
assert data != p1._data
def test_validate_override_context_data():
class Player(Model):
id = IntType()
p1 = Player({'id': 4})
data = validate(Player, {'id': 3}, context=p1._data)
assert data == {'id': 3}
def test_validate_ignore_extra_context_data():
class Player(Model):
id = IntType()
data = validate(Player, {'id': 4}, context={'name': 'Arthur'})
assert data == {'id': 4, 'name': 'Arthur'}
def test_validate_strict_with_context_data():
class Player(Model):
id = IntType()
try:
validate(Player, {'id': 4}, strict=True, context={'name': 'Arthur'})
except ValidationError as e:
assert 'name' in e.messages
def test_validate_partial_with_context_data():
class Player(Model):
id = IntType()
name = StringType(required=True)
data = validate(Player, {'id': 4}, partial=False, context={'name': 'Arthur'})
assert data == {'id': 4, 'name': 'Arthur'}
def test_validate_with_instance_level_validators():
class Player(Model):
id = IntType()
def validate_id(self, context, value):
if p1._initial['id'] != value:
p1._data['id'] = p1._initial['id']
raise ValidationError('Cannot change id')
p1 = Player({'id': 4})
p1.id = 3
try:
validate(Player, p1)
except ValidationError as e:
assert 'id' in e.messages
assert 'Cannot change id' in e.messages['id']
assert p1.id == 4
|
bsd-3-clause
|
alex/changes
|
changes/jobs/sync_build.py
|
1
|
3768
|
from datetime import datetime
from flask import current_app
from sqlalchemy.sql import func
from changes.config import db, queue
from changes.constants import Result, Status
from changes.db.utils import try_create
from changes.jobs.signals import fire_signal
from changes.models import Build, ItemStat, Job
from changes.utils.agg import safe_agg
from changes.queue.task import tracked_task
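# aggregate_build_stat sums (by default) a named ItemStat across every job
# belonging to the build and upserts the result as a build-level ItemStat row.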
def aggregate_build_stat(build, name, func_=func.sum):
value = db.session.query(
func.coalesce(func_(ItemStat.value), 0),
).filter(
ItemStat.item_id.in_(
db.session.query(Job.id).filter(
Job.build_id == build.id,
)
),
ItemStat.name == name,
).as_scalar()
try_create(ItemStat, where={
'item_id': build.id,
'name': name,
}, defaults={
'value': value
})
def abort_build(task):
build = Build.query.get(task.kwargs['build_id'])
build.status = Status.finished
build.result = Result.aborted
db.session.add(build)
db.session.commit()
current_app.logger.exception('Unrecoverable exception syncing build %s', build.id)
@tracked_task(on_abort=abort_build)
def sync_build(build_id):
"""
Synchronizing the build happens continuously until all jobs have reported in
as finished or have failed/aborted.
This task is responsible for:
- Checking in with jobs
- Aborting/retrying them if they're beyond limits
- Aggregating the results from jobs into the build itself
"""
build = Build.query.get(build_id)
if not build:
return
if build.status == Status.finished:
return
all_jobs = list(Job.query.filter(
Job.build_id == build_id,
))
is_finished = sync_build.verify_all_children() == Status.finished
build.date_started = safe_agg(
min, (j.date_started for j in all_jobs if j.date_started))
if is_finished:
build.date_finished = safe_agg(
max, (j.date_finished for j in all_jobs if j.date_finished))
else:
build.date_finished = None
if build.date_started and build.date_finished:
build.duration = int((build.date_finished - build.date_started).total_seconds() * 1000)
else:
build.duration = None
if any(j.result is Result.failed for j in all_jobs):
build.result = Result.failed
elif is_finished:
build.result = safe_agg(max, (j.result for j in all_jobs))
else:
build.result = Result.unknown
if is_finished:
build.status = Status.finished
elif any(j.status is not Status.queued for j in all_jobs):
build.status = Status.in_progress
else:
build.status = Status.queued
if db.session.is_modified(build):
build.date_modified = datetime.utcnow()
db.session.add(build)
db.session.commit()
if not is_finished:
raise sync_build.NotFinished
try:
aggregate_build_stat(build, 'test_count')
aggregate_build_stat(build, 'test_duration')
aggregate_build_stat(build, 'test_failures')
aggregate_build_stat(build, 'test_rerun_count')
aggregate_build_stat(build, 'tests_missing')
aggregate_build_stat(build, 'lines_covered')
aggregate_build_stat(build, 'lines_uncovered')
aggregate_build_stat(build, 'diff_lines_covered')
aggregate_build_stat(build, 'diff_lines_uncovered')
except Exception:
current_app.logger.exception('Failed to record aggregate stats for build %s', build.id)
fire_signal.delay(
signal='build.finished',
kwargs={'build_id': build.id.hex},
)
queue.delay('update_project_stats', kwargs={
'project_id': build.project_id.hex,
}, countdown=1)
|
apache-2.0
|
Jusedawg/SickRage
|
lib/github/StatsPunchCard.py
|
74
|
2341
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.NamedUser
class StatsPunchCard(github.GithubObject.NonCompletableGithubObject):
"""
This class represents the punch card. The reference can be found here http://developer.github.com/v3/repos/statistics/#get-the-number-of-commits-per-hour-in-each-day
"""
def get(self, day, hour):
"""
Get a specific element
:param day: int
:param hour: int
:rtype: int
"""
return self._dict[(day, hour)]
def _initAttributes(self):
self._dict = {}
def _useAttributes(self, attributes):
for day, hour, commits in attributes:
self._dict[(day, hour)] = commits
|
gpl-3.0
|
xiafan68/socialtsquery
|
script/transform/mergetransform.py
|
1
|
6150
|
# encoding:utf8
import sys
import os
import json
from optparse import OptionParser
class Transformer(object):
def __init__(self, idir, prefixs, start):
self.idir = idir.decode("utf8")
self.prefixs = prefixs
self.start = start
def loadRecs(self, xaxis, yaxis):
self.xaxis = xaxis
self.yaxis = yaxis
self.recs={}
self.xval = set()
self.yval = set()
for prefix in self.prefixs:
input = open(os.path.join(self.idir,prefix+"_o"+self.start+"w.txt"), "r")
for line in input.readlines():
row = json.loads(line)
if not(row[self.xaxis] in self.recs):
self.recs[row[self.xaxis]]={}
curDict = self.recs[row[self.xaxis]]
if not(prefix in curDict):
curDict[prefix]={}
curDict = curDict[prefix]
curDict[row[self.yaxis]] = row
self.xval.add(row[self.xaxis])
self.yval.add(row[self.yaxis])
input.close()
self.xval = sorted(self.xval)
self.yval = sorted(self.yval)
"""
xaxis approaches yaxis[1] ....
field1 field2 field3
"""
def writeHeader(self, output, fields):
header = "axis\tapp"
stubs = "\t".join([" " for i in range(len(fields) - 1)])
for yval in self.yval:
header = "\t%s\t%s\t%s"%(header,str(yval), stubs)
output.write(header+"\n")
header = " \t \t"
fieldsStr = "\t".join(fields)
fieldArr=[]
for i in range(len(self.yval)):
fieldArr.append(fieldsStr)
header += "\t".join(fieldArr)
output.write(header + "\n")
def transform(self, oFile, fields):
matrix = {}
output = open(oFile, "w")
self.writeHeader(output, fields)
for xval in self.xval:
xLine=str(xval)
for prefix in self.prefixs:
prefixLine = "%s\t%s"%(xLine,prefix.replace("_","\\\\_"))
line = prefixLine
rec = self.recs[xval][prefix]
for yval in self.yval:
yrec = rec[yval]
line = "%s\t%s"%(line, "\t".join([str(yrec[field]) for field in fields]))
output.write(line)
output.write("\n")
output.close()
def transformTime(self, oFile, fields):
matrix = {}
output = open(oFile, "w")
self.writeHeader(output, fields)
for xval in self.xval:
xLine=str(xval)
for prefix in self.prefixs:
prefixLine = "%s\t%s"%(xLine,prefix)
line = prefixLine
rec = self.recs[xval][prefix]
for yval in self.yval:
yrec = rec[yval]
newRec = []
agg=0.0
for field in fields:
if field != 'total_time':
agg += yrec[field]
newRec.append(yrec[field])
newRec.append(yrec['total_time'] - agg)
line = "%s\t%s"%(line, "\t".join([str(field) for field in newRec]))
output.write(line)
output.write("\n")
output.close()
def vararg_callback(option, opt_str, value, parser):
assert value is None
value = []
for arg in parser.rargs:
# stop on --foo like options
if arg[:2] == "--" and len(arg) > 2:
break
# stop on -a, but not on -3 or -3.0
if arg[:1] == "-" and len(arg) > 1:
break
value.append(arg)
del parser.rargs[:len(value)]
setattr(parser.values, option.dest, value)
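# Example: given "-p hpl_ns hpl_s -i raw", the callback stores
# ['hpl_ns', 'hpl_s'] on options.prefix and leaves "-i raw" to be parsed
# normally.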
"""
Merge the data from multiple files into the following format:
xaxis approaches yaxis[1] ....
field1 field2 field3
"""
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-i", "--idir",dest="inputs", help="input directory")
parser.add_option("-p", "--prefix",action="callback",callback=vararg_callback, dest="prefix", help="prefix of input files")
parser.add_option("-s", "--start", dest="start", help="offset from the start time of event")
parser.add_option("-o", "--odir", dest="odir", help="output directory")
parser.add_option("-x", "--xaxis", dest="xaxis", help="x axis")
parser.add_option("-y", "--yaxis", dest="yaxis", help="y axis")
parser.add_option("-f", "--fields", dest="fields",action="callback",callback=vararg_callback, help="fields")
parser.add_option("-t", "--tag", dest="tag",help="tag that will appended to the output file name")
parser.add_option("-m", "--minus", dest="minus",action="store_true",default=False, help="whether minus other costs from the total time" )
(options, args) = parser.parse_args(sys.argv)
print args
if len(args) != 1:
parser.print_help()
print "input file",options.prefix
t = Transformer(options.inputs, options.prefix, options.start)
t.loadRecs(options.xaxis, options.yaxis)
#oFile = os.path.join(options.odir, "k_w", "IO")
if not os.path.exists(options.odir):
os.makedirs(options.odir)
tag = ""
if options.tag:
tag = options.tag
fName = "%s_%s_s%s_%s.txt"%(options.xaxis,options.yaxis,options.start, tag)
oFile = os.path.join(options.odir, fName)
t.transform(oFile, options.fields)
#t.transform(oFile, "width", "k", ["atomic", "segstore", "basetree", "LIST_IO"])
#-p hpl_ns hpl_s ipl_s ipl_ns -i /home/xiafan/KuaiPan/dataset/exprresult/2014_12_26/raw -s 0 -o /home/xiafan/KuaiPan/dataset/exprresult/2014_12_26/transform -f basetree atomic segstore LIST_IO -x k -y width -t io
#-p hpl_ns hpl_s ipl_s ipl_ns -i /home/xiafan/KuaiPan/dataset/exprresult/2014_12_26/raw -s 0 -o /home/xiafan/KuaiPan/dataset/exprresult/2014_12_26/transform -f basetree_time atomic_time segstore_time compute_score LIST_IO_time de_sketch other_cost -x k -y width -t time
|
apache-2.0
|
df-a/par-cw
|
node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
|
1534
|
3426
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
import sys
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False
generator_filelist_paths = {
}
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
'LIB_DIR', 'SHARED_LIB_DIR']:
# Some gyp steps fail if these are empty(!).
generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
flavor = gyp.common.GetFlavor(params)
default_variables.setdefault('OS', flavor)
if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
if generator_flags.get('adjust_static_libraries', False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
toplevel = params['options'].toplevel_dir
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, generator_dir, output_dir, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
# Map of target -> list of targets it depends on.
edges = {}
# Queue of targets to visit.
targets_to_visit = target_list[:]
while len(targets_to_visit) > 0:
target = targets_to_visit.pop()
if target in edges:
continue
edges[target] = []
for dep in target_dicts[target].get('dependencies', []):
edges[target].append(dep)
targets_to_visit.append(dep)
try:
filepath = params['generator_flags']['output_dir']
except KeyError:
filepath = '.'
filename = os.path.join(filepath, 'dump.json')
f = open(filename, 'w')
json.dump(edges, f)
f.close()
print 'Wrote json to %s.' % filename
|
mpl-2.0
|
mayqueenEMBEDDED/mq-kernel
|
tools/perf/util/setup.py
|
320
|
1689
|
#!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
src_perf = getenv('srctree') + '/tools/perf'
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPI')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
# use full paths with source files
ext_sources = map(lambda x: '%s/%s' % (src_perf, x), ext_sources)
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
|
gpl-2.0
|
glouppe/scikit-learn
|
benchmarks/bench_lasso.py
|
297
|
3305
|
"""
Benchmarks of Lasso vs LassoLars
First, we fix a training set and increase the number of
samples. Then we plot the computation time as function of
the number of samples.
In the second benchmark, we increase the number of dimensions of the
training set. Then we plot the computation time as function of
the number of dimensions.
In both cases, only 10% of the features are informative.
"""
import gc
from time import time
import numpy as np
from sklearn.datasets.samples_generator import make_regression
def compute_bench(alpha, n_samples, n_features, precompute):
lasso_results = []
lars_lasso_results = []
it = 0
for ns in n_samples:
for nf in n_features:
it += 1
print('==================')
print('Iteration %s of %s' % (it, max(len(n_samples),
len(n_features))))
print('==================')
n_informative = nf // 10
X, Y, coef_ = make_regression(n_samples=ns, n_features=nf,
n_informative=n_informative,
noise=0.1, coef=True)
X /= np.sqrt(np.sum(X ** 2, axis=0)) # Normalize data
gc.collect()
print("- benchmarking Lasso")
clf = Lasso(alpha=alpha, fit_intercept=False,
precompute=precompute)
tstart = time()
clf.fit(X, Y)
lasso_results.append(time() - tstart)
gc.collect()
print("- benchmarking LassoLars")
clf = LassoLars(alpha=alpha, fit_intercept=False,
normalize=False, precompute=precompute)
tstart = time()
clf.fit(X, Y)
lars_lasso_results.append(time() - tstart)
return lasso_results, lars_lasso_results
if __name__ == '__main__':
from sklearn.linear_model import Lasso, LassoLars
import pylab as pl
alpha = 0.01 # regularization parameter
n_features = 10
list_n_samples = np.linspace(100, 1000000, 5).astype(np.int)
lasso_results, lars_lasso_results = compute_bench(alpha, list_n_samples,
[n_features], precompute=True)
pl.figure('scikit-learn LASSO benchmark results')
pl.subplot(211)
pl.plot(list_n_samples, lasso_results, 'b-',
label='Lasso')
pl.plot(list_n_samples, lars_lasso_results, 'r-',
label='LassoLars')
pl.title('precomputed Gram matrix, %d features, alpha=%s' % (n_features, alpha))
pl.legend(loc='upper left')
pl.xlabel('number of samples')
pl.ylabel('Time (s)')
pl.axis('tight')
n_samples = 2000
list_n_features = np.linspace(500, 3000, 5).astype(np.int)
lasso_results, lars_lasso_results = compute_bench(alpha, [n_samples],
list_n_features, precompute=False)
pl.subplot(212)
pl.plot(list_n_features, lasso_results, 'b-', label='Lasso')
pl.plot(list_n_features, lars_lasso_results, 'r-', label='LassoLars')
pl.title('%d samples, alpha=%s' % (n_samples, alpha))
pl.legend(loc='upper left')
pl.xlabel('number of features')
pl.ylabel('Time (s)')
pl.axis('tight')
pl.show()
|
bsd-3-clause
|
be-cloud-be/horizon-addons
|
server/openerp/addons/test_inherit/tests/test_inherit.py
|
44
|
4554
|
# -*- coding: utf-8 -*-
from openerp.tests import common
class test_inherits(common.TransactionCase):
def test_00_inherits(self):
""" Check that a many2one field with delegate=True adds an entry in _inherits """
daughter = self.env['test.inherit.daughter']
self.assertEqual(daughter._inherits, {'test.inherit.mother': 'template_id'})
def test_10_access_from_child_to_parent_model(self):
""" check whether added field in model is accessible from children models (_inherits) """
# This test checks if the new added column of a parent model
# is accessible from the child model. This test has been written
# to verify the purpose of the inheritance computing of the class
# in the openerp.osv.orm._build_model.
mother = self.env['test.inherit.mother']
daughter = self.env['test.inherit.daughter']
self.assertIn('field_in_mother', mother._fields)
self.assertIn('field_in_mother', daughter._fields)
def test_20_field_extension(self):
""" check the extension of a field in an inherited model """
mother = self.env['test.inherit.mother']
daughter = self.env['test.inherit.daughter']
# the field mother.name must have required=True and "Bar" as default
field = mother._fields['name']
self.assertTrue(field.required)
self.assertEqual(field.default(mother), "Bar")
self.assertEqual(mother._defaults.get('name'), "Bar")
self.assertEqual(mother.default_get(['name']), {'name': "Bar"})
# the field daughter.name must have required=False and "Baz" as default
field = daughter._fields['name']
self.assertFalse(field.required)
self.assertEqual(field.default(daughter), "Baz")
self.assertEqual(daughter._defaults.get('name'), "Baz")
self.assertEqual(daughter.default_get(['name']), {'name': "Baz"})
# the field daughter.template_id should have
# comodel_name='test.inherit.mother', string='Template', required=True
field = daughter._fields['template_id']
self.assertEqual(field.comodel_name, 'test.inherit.mother')
self.assertEqual(field.string, "Template")
self.assertTrue(field.required)
def test_30_depends_extension(self):
""" check that @depends on overridden compute methods extends dependencies """
mother = self.env['test.inherit.mother']
field = mother._fields['surname']
# the field dependencies are added
self.assertItemsEqual(field.depends, ['name', 'field_in_mother'])
def test_40_selection_extension(self):
""" check that attribute selection_add=... extends selection on fields. """
mother = self.env['test.inherit.mother']
# the extra values are added, both in the field and the column
self.assertEqual(mother._fields['state'].selection,
[('a', 'A'), ('b', 'B'), ('c', 'C'), ('d', 'D')])
self.assertEqual(mother._columns['state'].selection,
[('a', 'A'), ('b', 'B'), ('c', 'C'), ('d', 'D')])
def test_50_search_one2many(self):
""" check search on one2many field based on inherited many2one field. """
# create a daughter record attached to partner Demo
partner_demo = self.env.ref('base.partner_demo')
daughter = self.env['test.inherit.daughter'].create({'partner_id': partner_demo.id})
self.assertEqual(daughter.partner_id, partner_demo)
self.assertIn(daughter, partner_demo.daughter_ids)
# search the partner from the daughter record
partners = self.env['res.partner'].search([('daughter_ids', 'in', daughter.ids)])
self.assertIn(partner_demo, partners)
class test_override_property(common.TransactionCase):
def test_override_with_function_field(self):
""" test overriding a property field by a function field """
record = self.env['test.inherit.property'].create({'name': "Stuff"})
# record.property_foo is not a property field
self.assertEqual(record.property_foo, 42)
self.assertFalse(type(record).property_foo.company_dependent)
def test_override_with_computed_field(self):
""" test overriding a property field by a computed field """
record = self.env['test.inherit.property'].create({'name': "Stuff"})
# record.property_bar is not a property field
self.assertEqual(record.property_bar, 42)
self.assertFalse(type(record).property_bar.company_dependent)
|
agpl-3.0
|
gx1997/chrome-loongson
|
build/extract_from_cab.py
|
10
|
1750
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import shutil
import subprocess
import sys
import tempfile
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Expand.exe does its work in a fixed-named temporary directory created within
# the given output directory. This is a problem for concurrent extractions, so
# create a unique temp dir within the desired output directory to work around
# this limitation.
temp_dir = tempfile.mkdtemp(dir=output_dir)
try:
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, temp_dir])
if level == 0:
# Move the output file into place, preserving expand.exe's behavior of
# paving over any preexisting file.
output_file = os.path.join(output_dir, archived_file)
try:
os.remove(output_file)
except OSError:
pass
os.rename(os.path.join(temp_dir, archived_file), output_file)
finally:
shutil.rmtree(temp_dir, True)
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
|
spirrello/spirrello-pynet-work
|
applied_python/lib/python2.7/site-packages/ansible/modules/core/files/unarchive.py
|
6
|
10956
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <[email protected]>
# (c) 2013, Dylan Martin <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: unarchive
version_added: 1.4
short_description: Unpacks an archive after (optionally) copying it from the local machine.
extends_documentation_fragment: files
description:
- The M(unarchive) module unpacks an archive. By default, it will copy the source file from the local system to the target before unpacking - set copy=no to unpack an archive which already exists on the target.
options:
src:
description:
- If copy=yes (default), local path to archive file to copy to the target server; can be absolute or relative. If copy=no, path on the target server to existing archive file to unpack.
required: true
default: null
dest:
description:
- Remote absolute path where the archive should be unpacked
required: true
default: null
copy:
description:
- "If true, the file is copied from local 'master' to the target machine, otherwise, the plugin will look for src archive at the target machine."
required: false
choices: [ "yes", "no" ]
default: "yes"
creates:
description:
- a filename; when it already exists, this step will B(not) be run.
required: no
default: null
version_added: "1.6"
author: Dylan Martin
todo:
- detect changed/unchanged for .zip files
- handle common unarchive args, like preserve owner/timestamp etc...
notes:
- requires C(tar)/C(unzip) command on target host
- can handle I(gzip), I(bzip2) and I(xz) compressed as well as uncompressed tar files
- detects type of archive automatically
- uses tar's C(--diff arg) to calculate if changed or not. If this C(arg) is not
supported, it will always unpack the archive
- does not detect if a .zip file is different from destination - always unzips
- existing files/directories in the destination which are not in the archive
are not touched. This is the same behavior as a normal archive extraction
- existing files/directories in the destination which are not in the archive
are ignored for purposes of deciding if the archive should be unpacked or not
'''
EXAMPLES = '''
# Example from Ansible Playbooks
- unarchive: src=foo.tgz dest=/var/lib/foo
# Unarchive a file that is already on the remote machine
- unarchive: src=/tmp/foo.zip dest=/usr/local/bin copy=no
'''
import os
import re  # used by TgzArchive.is_unarchived to parse tar --diff output
from zipfile import ZipFile
class UnarchiveError(Exception):
pass
# class to handle .zip files
class ZipArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
self.cmd_path = self.module.get_bin_path('unzip')
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
archive = ZipFile(self.src)
try:
self._files_in_archive = archive.namelist()
except:
raise UnarchiveError('Unable to list files in the archive')
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
return dict(unarchived=False)
def unarchive(self):
cmd = '%s -o "%s" -d "%s"' % (self.cmd_path, self.src, self.dest)
rc, out, err = self.module.run_command(cmd)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
cmd = '%s -l "%s"' % (self.cmd_path, self.src)
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return True
return False
# class to handle gzipped tar files
class TgzArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
# Prefer gtar (GNU tar) as it supports the compression options -zjJ
self.cmd_path = self.module.get_bin_path('gtar', None)
if not self.cmd_path:
# Fallback to tar
self.cmd_path = self.module.get_bin_path('tar')
self.zipflag = 'z'
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
cmd = '%s -t%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
if rc != 0:
raise UnarchiveError('Unable to list files in the archive')
for filename in out.splitlines():
if filename:
self._files_in_archive.append(filename)
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
cmd = '%s -C "%s" --diff -%sf "%s"' % (self.cmd_path, self.dest, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
unarchived = (rc == 0)
if not unarchived:
# Check whether the differences are in something that we're
# setting anyway
# What will be set
to_be_set = set()
for perm in (('Mode', mode), ('Gid', group), ('Uid', owner)):
if perm[1] is not None:
to_be_set.add(perm[0])
# What is different
changes = set()
difference_re = re.compile(r': (.*) differs$')
for line in out.splitlines():
match = difference_re.search(line)
if not match:
# Unknown tar output. Assume we have changes
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
changes.add(match.groups()[0])
if changes and changes.issubset(to_be_set):
unarchived = True
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
def unarchive(self):
cmd = '%s -x%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd, cwd=self.dest)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
try:
if self.files_in_archive:
return True
except UnarchiveError:
pass
# Errors and no files in archive assume that we weren't able to
# properly unarchive it
return False
# class to handle tar files that aren't compressed
class TarArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarArchive, self).__init__(src, dest, module)
self.zipflag = ''
# class to handle bzip2 compressed tar files
class TarBzipArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarBzipArchive, self).__init__(src, dest, module)
self.zipflag = 'j'
# class to handle xz compressed tar files
class TarXzArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarXzArchive, self).__init__(src, dest, module)
self.zipflag = 'J'
# try handlers in order and return the one that works or bail if none work
def pick_handler(src, dest, module):
handlers = [TgzArchive, ZipArchive, TarArchive, TarBzipArchive, TarXzArchive]
for handler in handlers:
obj = handler(src, dest, module)
if obj.can_handle_archive():
return obj
module.fail_json(msg='Failed to find handler to unarchive. Make sure the required command to extract the file is installed.')
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = dict(
src = dict(required=True),
original_basename = dict(required=False), # used to handle 'dest is a directory' via template, a slight hack
dest = dict(required=True),
copy = dict(default=True, type='bool'),
creates = dict(required=False),
),
add_file_common_args=True,
)
src = os.path.expanduser(module.params['src'])
dest = os.path.expanduser(module.params['dest'])
copy = module.params['copy']
file_args = module.load_file_common_arguments(module.params)
# did tar file arrive?
if not os.path.exists(src):
if copy:
module.fail_json(msg="Source '%s' failed to transfer" % src)
else:
module.fail_json(msg="Source '%s' does not exist" % src)
if not os.access(src, os.R_OK):
module.fail_json(msg="Source '%s' not readable" % src)
# is dest OK to receive tar file?
if not os.path.isdir(dest):
module.fail_json(msg="Destination '%s' is not a directory" % dest)
if not os.access(dest, os.W_OK):
module.fail_json(msg="Destination '%s' not writable" % dest)
handler = pick_handler(src, dest, module)
res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)
# do we need to do unpack?
res_args['check_results'] = handler.is_unarchived(file_args['mode'],
file_args['owner'], file_args['group'])
if res_args['check_results']['unarchived']:
res_args['changed'] = False
else:
# do the unpack
try:
res_args['extract_results'] = handler.unarchive()
if res_args['extract_results']['rc'] != 0:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
except IOError:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest))
else:
res_args['changed'] = True
# do we need to change perms?
for filename in handler.files_in_archive:
file_args['path'] = os.path.join(dest, filename)
try:
res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])
except (IOError, OSError), e:
module.fail_json(msg="Unexpected error when accessing exploded file: %s" % str(e))
module.exit_json(**res_args)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
factorlibre/l10n-spain
|
l10n_es/migrations/8.0.4.1/pre-rename.py
|
52
|
1556
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Domatix (http://www.domatix.com)
# Angel Moya <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
__name__ = ("Cambia columnas name y description")
def migrate_tax_template(cr, version):
cr.execute("""ALTER TABLE account_tax
RENAME COLUMN name to name_to_description_temp""")
cr.execute("""ALTER TABLE account_tax
RENAME COLUMN description to name""")
cr.execute("""ALTER TABLE account_tax
RENAME COLUMN name_to_description_temp to description""")
def migrate(cr, version):
if not version:
return
migrate_tax_template(cr, version)
|
agpl-3.0
|
sysbot/CouchPotatoServer
|
libs/rsa/varblock.py
|
216
|
4360
|
# -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''VARBLOCK file support
The VARBLOCK file format is as follows, where || denotes byte concatenation:
FILE := VERSION || BLOCK || BLOCK ...
BLOCK := LENGTH || DATA
LENGTH := varint-encoded length of the subsequent data. Varint comes from
Google Protobuf, and encodes an integer into a variable number of bytes.
Each byte uses the 7 lowest bits to encode the value. The highest bit set
to 1 indicates the next byte is also part of the varint. The last byte will
have this bit set to 0.
This file format is called the VARBLOCK format, in line with the varint format
used to denote the block sizes.
'''
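# Worked example: the value 300 (binary 10 0101100) encodes to b'\xac\x02':
# the low 7 bits (0101100 = 0x2C) are written first with the continuation
# bit set (0xAC), then the remaining bits (10 = 0x02) with it clear.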
from rsa._compat import byte, b
ZERO_BYTE = b('\x00')
VARBLOCK_VERSION = 1
def read_varint(infile):
'''Reads a varint from the file.
When the first byte to be read indicates EOF, (0, 0) is returned. When an
EOF occurs when at least one byte has been read, an EOFError exception is
raised.
@param infile: the file-like object to read from. It should have a read()
method.
@returns (varint, length), the read varint and the number of read bytes.
'''
varint = 0
read_bytes = 0
while True:
char = infile.read(1)
if len(char) == 0:
if read_bytes == 0:
return (0, 0)
raise EOFError('EOF while reading varint, value is %i so far' %
varint)
byte = ord(char)
varint += (byte & 0x7F) << (7 * read_bytes)
read_bytes += 1
if not byte & 0x80:
return (varint, read_bytes)
def write_varint(outfile, value):
'''Writes a varint to a file.
@param outfile: the file-like object to write to. It should have a write()
method.
@returns the number of written bytes.
'''
# there is a big difference between 'write the value 0' (this case) and
# 'there is nothing left to write' (the false-case of the while loop)
if value == 0:
outfile.write(ZERO_BYTE)
return 1
written_bytes = 0
while value > 0:
to_write = value & 0x7f
value = value >> 7
if value > 0:
to_write |= 0x80
outfile.write(byte(to_write))
written_bytes += 1
return written_bytes
def yield_varblocks(infile):
'''Generator, yields each block in the input file.
@param infile: file to read, is expected to have the VARBLOCK format as
described in the module's docstring.
@yields the contents of each block.
'''
# Check the version number
first_char = infile.read(1)
if len(first_char) == 0:
raise EOFError('Unable to read VARBLOCK version number')
version = ord(first_char)
if version != VARBLOCK_VERSION:
raise ValueError('VARBLOCK version %i not supported' % version)
while True:
(block_size, read_bytes) = read_varint(infile)
# EOF at block boundary, that's fine.
if read_bytes == 0 and block_size == 0:
break
block = infile.read(block_size)
read_size = len(block)
if read_size != block_size:
raise EOFError('Block size is %i, but could read only %i bytes' %
(block_size, read_size))
yield block
def yield_fixedblocks(infile, blocksize):
'''Generator, yields each block of ``blocksize`` bytes in the input file.
:param infile: file to read and separate in blocks.
:returns: a generator that yields the contents of each block
'''
while True:
block = infile.read(blocksize)
read_bytes = len(block)
if read_bytes == 0:
break
yield block
if read_bytes < blocksize:
break
|
gpl-3.0
|
cyberden/CouchPotatoServer
|
couchpotato/core/_base/scheduler.py
|
73
|
2528
|
from apscheduler.scheduler import Scheduler as Sched
from couchpotato.core.event import addEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
autoload = 'Scheduler'
class Scheduler(Plugin):
crons = {}
intervals = {}
started = False
def __init__(self):
addEvent('schedule.cron', self.cron)
addEvent('schedule.interval', self.interval)
addEvent('schedule.remove', self.remove)
addEvent('schedule.queue', self.queue)
self.sched = Sched(misfire_grace_time = 60)
self.sched.start()
self.started = True
def remove(self, identifier):
for cron_type in ['intervals', 'crons']:
try:
self.sched.unschedule_job(getattr(self, cron_type)[identifier]['job'])
log.debug('%s unscheduled %s', (cron_type.capitalize(), identifier))
except:
pass
def doShutdown(self, *args, **kwargs):
self.stop()
return super(Scheduler, self).doShutdown(*args, **kwargs)
def stop(self):
if self.started:
log.debug('Stopping scheduler')
self.sched.shutdown(wait = False)
log.debug('Scheduler stopped')
self.started = False
def cron(self, identifier = '', handle = None, day = '*', hour = '*', minute = '*'):
log.info('Scheduling "%s", cron: day = %s, hour = %s, minute = %s', (identifier, day, hour, minute))
self.remove(identifier)
self.crons[identifier] = {
'handle': handle,
'day': day,
'hour': hour,
'minute': minute,
'job': self.sched.add_cron_job(handle, day = day, hour = hour, minute = minute)
}
def interval(self, identifier = '', handle = None, hours = 0, minutes = 0, seconds = 0):
log.info('Scheduling %s, interval: hours = %s, minutes = %s, seconds = %s', (identifier, hours, minutes, seconds))
self.remove(identifier)
self.intervals[identifier] = {
'handle': handle,
'hours': hours,
'minutes': minutes,
'seconds': seconds,
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
}
return True
def queue(self, handlers = None):
if not handlers: handlers = []
for h in handlers:
h()
if self.shuttingDown():
break
return True
|
gpl-3.0
|
odinho/medlemssys
|
medlemssys/config/settings/common.py
|
2
|
9613
|
# -*- coding: utf-8 -*-
"""
Django settings for Medlemssys project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import os
# (./medlemssys/config/settings/__init__.py => ./)
APPS_DIR = os.path.normpath(os.path.dirname(__file__) + '/../../')
ROOT_DIR = os.path.normpath(APPS_DIR + '/../')
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'rest_framework', # API
'rest_framework.authtoken',
'reversion', # History
'reversion_compare', # UI for history
)
# Apps specific for this project go here.
LOCAL_APPS = (
'medlemssys.medlem',
'medlemssys.innhenting',
'medlemssys.statistikk',
'medlemssys.giro',
'medlemssys.api',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE = (
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
#MIGRATION_MODULES = {
# 'sites': 'medlemssys.contrib.sites.migrations'
#}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
os.path.join(APPS_DIR, 'fixtures'),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Odin Hørthe Omdal""", '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'medlemssys',
}
}
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'nn-no'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
os.path.join(APPS_DIR, 'templates'),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = os.path.join(ROOT_DIR, 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
os.path.join(APPS_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Debug logging
# Override in the environment-specific settings files
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s',
},
'simple': {
'format': '%(asctime)s %(levelname)s %(message)s ',
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': True,
},
'': {
'level': 'INFO',
'handlers': ['console'],
'propagate': True,
},
},
}
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = os.path.join(APPS_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'medlemssys.config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'medlemssys.config.wsgi.application'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# REST FRAMEWORK CONFIGURATION
# ------------------------------------------------------------------------------
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAdminUser',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 200,
}
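# Illustrative client usage of the API configuration above (hypothetical
# token and endpoint): with TokenAuthentication enabled, a client sends an
# "Authorization: Token <key>" header, and LimitOffsetPagination accepts
# ?limit=...&offset=... query parameters, e.g.
#   curl -H 'Authorization: Token abc123' 'https://example.com/api/medlem/?limit=200&offset=0'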
# MEDLEMSSYS CONFIGURATION
# ------------------------------------------------------------------------------
BEHAVIOUR_MODULE = 'medlemssys.behaviour.barnogungdom'
KONTONUMMER = '3450 65 48618'
ORGNUMMER = '959 358 451'
ORG_ADR = ''
# Token required when creating members (medlem) via the API, to stop spam bots.
INNMELDING_TOKEN = ''
VERVETOPP = None
# Used for allowing CORS, and where the ?next= redirector can point
TRUSTED_EXTERNAL_DOMAINS = ['example.com']
DEFAULT_HOST = ''
DEFAULT_EMAIL = ''
# Deprecated
GIRO_CSV = os.path.join(ROOT_DIR, 'nmu-bet.csv')
LAG_CSV = os.path.join(ROOT_DIR, 'nmu-lag.csv')
MEDLEM_CSV = os.path.join(ROOT_DIR, 'nmu-medl.csv')
|
agpl-3.0
|
gotomypc/bigcouch
|
couchjs/scons/scons-local-2.0.1/SCons/compat/_scons_subprocess.py
|
183
|
44500
|
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# This module should remain compatible with Python 2.2, see PEP 291.
#
# Copyright (c) 2003-2005 by Peter Astrand <[email protected]>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
r"""subprocess - Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes. This module
intends to replace several other, older modules and functions, like:
os.system
os.spawn*
os.popen*
popen2.*
commands.*
Information about how the subprocess module can be used to replace these
modules and functions can be found below.
Using the subprocess module
===========================
This module defines one class called Popen:
class Popen(args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
Arguments are:
args should be a string, or a sequence of program arguments. The
program to execute is normally the first item in the args sequence or
string, but can be explicitly set by using the executable argument.
On UNIX, with shell=False (default): In this case, the Popen class
uses os.execvp() to execute the child program. args should normally
be a sequence. A string will be treated as a sequence with the string
as the only item (the program to execute).
On UNIX, with shell=True: If args is a string, it specifies the
command string to execute through the shell. If args is a sequence,
the first item specifies the command string, and any additional items
will be treated as additional shell arguments.
On Windows: the Popen class uses CreateProcess() to execute the child
program, which operates on strings. If args is a sequence, it will be
converted to a string using the list2cmdline method. Please note that
not all MS Windows applications interpret the command line the same
way: The list2cmdline is designed for applications using the same
rules as the MS C runtime.
bufsize, if given, has the same meaning as the corresponding argument
to the built-in open() function: 0 means unbuffered, 1 means line
buffered, any other positive value means use a buffer of
(approximately) that size. A negative bufsize means to use the system
default, which usually means fully buffered. The default value for
bufsize is 0 (unbuffered).
stdin, stdout and stderr specify the executed programs' standard
input, standard output and standard error file handles, respectively.
Valid values are PIPE, an existing file descriptor (a positive
integer), an existing file object, and None. PIPE indicates that a
new pipe to the child should be created. With None, no redirection
will occur; the child's file handles will be inherited from the
parent. Additionally, stderr can be STDOUT, which indicates that the
stderr data from the applications should be captured into the same
file handle as for stdout.
If preexec_fn is set to a callable object, this object will be called
in the child process just before the child is executed.
If close_fds is true, all file descriptors except 0, 1 and 2 will be
closed before the child process is executed.
if shell is true, the specified command will be executed through the
shell.
If cwd is not None, the current directory will be changed to cwd
before the child is executed.
If env is not None, it defines the environment variables for the new
process.
If universal_newlines is true, the file objects stdout and stderr are
opened as text files, but lines may be terminated by any of '\n',
the Unix end-of-line convention, '\r', the Macintosh convention or
'\r\n', the Windows convention. All of these external representations
are seen as '\n' by the Python program. Note: This feature is only
available if Python is built with universal newline support (the
default). Also, the newlines attributes of the file objects stdout,
stdin and stderr are not updated by the communicate() method.
The startupinfo and creationflags, if given, will be passed to the
underlying CreateProcess() function. They can specify things such as
appearance of the main window and priority for the new process.
(Windows only)
This module also defines two shortcut functions:
call(*popenargs, **kwargs):
Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
check_call(*popenargs, **kwargs):
Run command with arguments. Wait for command to complete. If the
exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
check_call(["ls", "-l"])
Exceptions
----------
Exceptions raised in the child process, before the new program has
started to execute, will be re-raised in the parent. Additionally,
the exception object will have one extra attribute called
'child_traceback', which is a string containing traceback information
from the child's point of view.
The most common exception raised is OSError. This occurs, for
example, when trying to execute a non-existent file. Applications
should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
check_call() will raise CalledProcessError, if the called process
returns a non-zero return code.
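A short illustrative example of catching the error:
    try:
        check_call(["false"])
    except CalledProcessError, e:
        print >>sys.stderr, "Command failed with code", e.returncode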
Security
--------
Unlike some other popen functions, this implementation will never call
/bin/sh implicitly. This means that all characters, including shell
metacharacters, can safely be passed to child processes.
Popen objects
=============
Instances of the Popen class have the following methods:
poll()
Check if child process has terminated. Returns returncode
attribute.
wait()
Wait for child process to terminate. Returns returncode attribute.
communicate(input=None)
Interact with process: Send data to stdin. Read data from stdout
and stderr, until end-of-file is reached. Wait for process to
terminate. The optional stdin argument should be a string to be
sent to the child process, or None, if no data should be sent to
the child.
communicate() returns a tuple (stdout, stderr).
Note: The data read is buffered in memory, so do not use this
method if the data size is large or unlimited.
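    Example (illustrative):
        p = Popen(["cat"], stdin=PIPE, stdout=PIPE)
        (stdout, stderr) = p.communicate("some input")
    Here stdout becomes "some input" and stderr is None, because stderr
    was not redirected.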
The following attributes are also available:
stdin
If the stdin argument is PIPE, this attribute is a file object
that provides input to the child process. Otherwise, it is None.
stdout
If the stdout argument is PIPE, this attribute is a file object
that provides output from the child process. Otherwise, it is
None.
stderr
If the stderr argument is PIPE, this attribute is a file object that
provides error output from the child process. Otherwise, it is
None.
pid
The process ID of the child process.
returncode
The child return code. A None value indicates that the process
hasn't terminated yet. A negative value -N indicates that the
child was terminated by signal N (UNIX only).
Replacing older functions with the subprocess module
====================================================
In this section, "a ==> b" means that b can be used as a replacement
for a.
Note: All functions in this section fail (more or less) silently if
the executed program cannot be found; this module raises an OSError
exception instead.
In the following examples, we assume that the subprocess module is
imported with "from subprocess import *".
Replacing /bin/sh shell backquote
---------------------------------
output=`mycmd myarg`
==>
output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
Replacing os.system()
---------------------
sts = os.system("mycmd" + " myarg")
==>
p = Popen("mycmd" + " myarg", shell=True)
pid, sts = os.waitpid(p.pid, 0)
Note:
* Calling the program through the shell is usually not required.
* It's easier to look at the returncode attribute than the
exitstatus.
A more real-world example would look like this:
try:
retcode = call("mycmd" + " myarg", shell=True)
if retcode < 0:
print >>sys.stderr, "Child was terminated by signal", -retcode
else:
print >>sys.stderr, "Child returned", retcode
except OSError, e:
print >>sys.stderr, "Execution failed:", e
Replacing os.spawn*
-------------------
P_NOWAIT example:
pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
==>
pid = Popen(["/bin/mycmd", "myarg"]).pid
P_WAIT example:
retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
==>
retcode = call(["/bin/mycmd", "myarg"])
Vector example:
os.spawnvp(os.P_NOWAIT, path, args)
==>
Popen([path] + args[1:])
Environment example:
os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
==>
Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
Replacing os.popen*
-------------------
pipe = os.popen(cmd, mode='r', bufsize)
==>
pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout
pipe = os.popen(cmd, mode='w', bufsize)
==>
pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin
(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdin, child_stdout) = (p.stdin, p.stdout)
(child_stdin,
child_stdout,
child_stderr) = os.popen3(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
(child_stdin,
child_stdout,
child_stderr) = (p.stdin, p.stdout, p.stderr)
(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize)
==>
p = Popen(cmd, shell=True, bufsize=bufsize,
stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout)
Replacing popen2.*
------------------
Note: If the cmd argument to popen2 functions is a string, the command
is executed through /bin/sh. If it is a list, the command is directly
executed.
(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode)
==>
p = Popen(["somestring"], shell=True, bufsize=bufsize
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode)
==>
p = Popen(["mycmd", "myarg"], bufsize=bufsize,
stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
The popen2.Popen3 and popen2.Popen4 classes basically work as subprocess.Popen,
except that:
* subprocess.Popen raises an exception if the execution fails
* the capturestderr argument is replaced with the stderr argument.
* stdin=PIPE and stdout=PIPE must be specified.
* popen2 closes all file descriptors by default, but you have to specify
close_fds=True with subprocess.Popen.
"""
import sys
mswindows = (sys.platform == "win32")
import os
import types
import traceback
# Exception classes used by this module.
class CalledProcessError(Exception):
"""This exception is raised when a process run by check_call() returns
a non-zero exit status. The exit status will be stored in the
returncode attribute."""
def __init__(self, returncode, cmd):
self.returncode = returncode
self.cmd = cmd
def __str__(self):
return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
if mswindows:
try:
import threading
except ImportError:
# SCons: the threading module is only used by the communicate()
# method, which we don't actually use, so don't worry if we
# can't import it.
pass
import msvcrt
try:
# Try to get _subprocess
from _subprocess import *
class STARTUPINFO(object):
dwFlags = 0
hStdInput = None
hStdOutput = None
hStdError = None
wShowWindow = 0
class pywintypes(object):
error = IOError
except ImportError:
# If not there, then drop back to requiring pywin32
# TODO: Should this be wrapped in try as well? To notify user to install
# pywin32 ? With URL to it?
import pywintypes
from win32api import GetStdHandle, STD_INPUT_HANDLE, \
STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
from win32api import GetCurrentProcess, DuplicateHandle, \
GetModuleFileName, GetVersion
from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
from win32pipe import CreatePipe
from win32process import CreateProcess, STARTUPINFO, \
GetExitCodeProcess, STARTF_USESTDHANDLES, \
STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
else:
import select
import errno
import fcntl
import pickle
try:
fcntl.F_GETFD
except AttributeError:
fcntl.F_GETFD = 1
try:
fcntl.F_SETFD
except AttributeError:
fcntl.F_SETFD = 2
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except KeyboardInterrupt:
raise # SCons: don't swallow keyboard interrupts
except:
MAXFD = 256
try:
isinstance(1, int)
except TypeError:
def is_int(obj):
return isinstance(obj, type(1))
def is_int_or_long(obj):
return type(obj) in (type(1), type(1L))
else:
def is_int(obj):
return isinstance(obj, int)
def is_int_or_long(obj):
return isinstance(obj, (int, long))
try:
types.StringTypes
except AttributeError:
try:
types.StringTypes = (str, unicode)
except NameError:
types.StringTypes = (str,)
def is_string(obj):
return isinstance(obj, types.StringTypes)
_active = []
def _cleanup():
for inst in _active[:]:
if inst.poll(_deadstate=sys.maxsize) >= 0:
try:
_active.remove(inst)
except ValueError:
# This can happen if two threads create a new Popen instance.
# It's harmless that it was already removed, so ignore.
pass
PIPE = -1
STDOUT = -2
def call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
return apply(Popen, popenargs, kwargs).wait()
def check_call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete. If
the exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
check_call(["ls", "-l"])
"""
retcode = call(*popenargs, **kwargs)
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
if retcode:
raise CalledProcessError(retcode, cmd)
return retcode
def list2cmdline(seq):
"""
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
1) Arguments are delimited by white space, which is either a
space or a tab.
2) A string surrounded by double quotation marks is
interpreted as a single argument, regardless of white space
contained within. A quoted string can be embedded in an
argument.
3) A double quotation mark preceded by a backslash is
interpreted as a literal double quotation mark.
4) Backslashes are interpreted literally, unless they
immediately precede a double quotation mark.
5) If backslashes immediately precede a double quotation mark,
every pair of backslashes is interpreted as a literal
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
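    For example (illustrative), list2cmdline(["a b", "c"]) produces the
    string '"a b" c': the first argument is quoted because it contains a
    space, while the second passes through unchanged.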
"""
# See
# http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
result = []
needquote = False
for arg in seq:
bs_buf = []
# Add a space to separate this argument from the others
if result:
result.append(' ')
needquote = (" " in arg) or ("\t" in arg)
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
                # Double backslashes.
result.append('\\' * len(bs_buf)*2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
        # Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
class Popen(object):
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
"""Create new Popen instance."""
_cleanup()
self._child_created = False
if not is_int_or_long(bufsize):
raise TypeError("bufsize must be an integer")
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows "
"platforms")
if close_fds:
raise ValueError("close_fds is not supported on Windows "
"platforms")
else:
# POSIX
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows "
"platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows "
"platforms")
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.universal_newlines = universal_newlines
# Input and output objects. The general principle is like
# this:
#
# Parent Child
# ------ -----
# p2cwrite ---stdin---> p2cread
# c2pread <--stdout--- c2pwrite
# errread <--stderr--- errwrite
#
# On POSIX, the child objects are file descriptors. On
# Windows, these are Windows file handles. The parent objects
# are file descriptors on both platforms. The parent objects
# are None when not using PIPEs. The child objects are None
# when not redirecting.
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
self._execute_child(args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
if p2cwrite:
self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
if c2pread:
if universal_newlines:
self.stdout = os.fdopen(c2pread, 'rU', bufsize)
else:
self.stdout = os.fdopen(c2pread, 'rb', bufsize)
if errread:
if universal_newlines:
self.stderr = os.fdopen(errread, 'rU', bufsize)
else:
self.stderr = os.fdopen(errread, 'rb', bufsize)
def _translate_newlines(self, data):
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
return data
def __del__(self):
if not self._child_created:
# We didn't get to successfully create a child process.
return
# In case the child hasn't been waited on, check if it's done.
self.poll(_deadstate=sys.maxsize)
if self.returncode is None and _active is not None:
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
def communicate(self, input=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
process to terminate. The optional input argument should be a
string to be sent to the child process, or None, if no data
should be sent to the child.
communicate() returns a tuple (stdout, stderr)."""
# Optimization: If we are only using one pipe, or no pipe at
# all, using select() or threads is unnecessary.
if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
stdout = None
stderr = None
if self.stdin:
if input:
self.stdin.write(input)
self.stdin.close()
elif self.stdout:
stdout = self.stdout.read()
elif self.stderr:
stderr = self.stderr.read()
self.wait()
return (stdout, stderr)
return self._communicate(input)
if mswindows:
#
# Windows methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tupel with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin is None and stdout is None and stderr is None:
return (None, None, None, None, None, None)
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
p2cread = GetStdHandle(STD_INPUT_HANDLE)
elif stdin == PIPE:
p2cread, p2cwrite = CreatePipe(None, 0)
# Detach and turn into fd
p2cwrite = p2cwrite.Detach()
p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
elif is_int(stdin):
p2cread = msvcrt.get_osfhandle(stdin)
else:
# Assuming file-like object
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout is None:
c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
elif stdout == PIPE:
c2pread, c2pwrite = CreatePipe(None, 0)
# Detach and turn into fd
c2pread = c2pread.Detach()
c2pread = msvcrt.open_osfhandle(c2pread, 0)
elif is_int(stdout):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
# Assuming file-like object
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr is None:
errwrite = GetStdHandle(STD_ERROR_HANDLE)
elif stderr == PIPE:
errread, errwrite = CreatePipe(None, 0)
# Detach and turn into fd
errread = errread.Detach()
errread = msvcrt.open_osfhandle(errread, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
elif is_int(stderr):
errwrite = msvcrt.get_osfhandle(stderr)
else:
# Assuming file-like object
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
return DuplicateHandle(GetCurrentProcess(), handle,
GetCurrentProcess(), 0, 1,
DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
"""Find and return absolut path to w9xpopen.exe"""
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
# Eeek - file-not-found - possibly an embedding
# situation - see if we can locate it in sys.exec_prefix
w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
raise RuntimeError("Cannot locate w9xpopen.exe, which is "
"needed for Popen to work with your "
"shell or platform.")
return w9xpopen
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (MS Windows version)"""
if not isinstance(args, types.StringTypes):
args = list2cmdline(args)
# Process startup details
if startupinfo is None:
startupinfo = STARTUPINFO()
if None not in (p2cread, c2pwrite, errwrite):
startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESTDHANDLES
startupinfo.hStdInput = p2cread
startupinfo.hStdOutput = c2pwrite
startupinfo.hStdError = errwrite
if shell:
startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = comspec + " /c " + args
if (GetVersion() >= 0x80000000L or
os.path.basename(comspec).lower() == "command.com"):
# Win9x, or using command.com on NT. We need to
# use the w9xpopen intermediate program. For more
# information, see KB Q150956
# (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
w9xpopen = self._find_w9xpopen()
args = '"%s" %s' % (w9xpopen, args)
# Not passing CREATE_NEW_CONSOLE has been known to
# cause random failures on win9x. Specifically a
# dialog: "Your program accessed mem currently in
# use at xxx" and a hopeful warning about the
                # stability of your system. The cost is that Ctrl+C won't
# kill children.
creationflags = creationflags | CREATE_NEW_CONSOLE
# Start the process
try:
hp, ht, pid, tid = CreateProcess(executable, args,
# no special security
None, None,
# must inherit handles to pass std
# handles
1,
creationflags,
env,
cwd,
startupinfo)
except pywintypes.error, e:
# Translate pywintypes.error to WindowsError, which is
# a subclass of OSError. FIXME: We should really
                # translate errno using _sys_errlist (or similar), but
# how can this be done from Python?
raise WindowsError(*e.args)
# Retain the process handle, but close the thread handle
self._child_created = True
self._handle = hp
self.pid = pid
ht.Close()
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
# to make sure that no handles to the write end of the
# output pipe are maintained in this process or else the
# pipe will not close when the child process exits and the
# ReadFile will hang.
if p2cread is not None:
p2cread.Close()
if c2pwrite is not None:
c2pwrite.Close()
if errwrite is not None:
errwrite.Close()
def poll(self, _deadstate=None):
"""Check if child process has terminated. Returns returncode
attribute."""
if self.returncode is None:
if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
self.returncode = GetExitCodeProcess(self._handle)
return self.returncode
def wait(self):
"""Wait for child process to terminate. Returns returncode
attribute."""
if self.returncode is None:
obj = WaitForSingleObject(self._handle, INFINITE)
self.returncode = GetExitCodeProcess(self._handle)
return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
def _communicate(self, input):
stdout = None # Return
stderr = None # Return
if self.stdout:
stdout = []
stdout_thread = threading.Thread(target=self._readerthread,
args=(self.stdout, stdout))
stdout_thread.setDaemon(True)
stdout_thread.start()
if self.stderr:
stderr = []
stderr_thread = threading.Thread(target=self._readerthread,
args=(self.stderr, stderr))
stderr_thread.setDaemon(True)
stderr_thread.start()
if self.stdin:
if input is not None:
self.stdin.write(input)
self.stdin.close()
if self.stdout:
stdout_thread.join()
if self.stderr:
stderr_thread.join()
# All data exchanged. Translate lists into strings.
if stdout is not None:
stdout = stdout[0]
if stderr is not None:
stderr = stderr[0]
# Translate newlines, if requested. We cannot let the file
# object do the translation: It is based on stdio, which is
# impossible to combine with select (unless forcing no
# buffering).
if self.universal_newlines and hasattr(file, 'newlines'):
if stdout:
stdout = self._translate_newlines(stdout)
if stderr:
stderr = self._translate_newlines(stderr)
self.wait()
return (stdout, stderr)
else:
#
# POSIX methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tupel with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = os.pipe()
elif is_int(stdin):
p2cread = stdin
else:
# Assuming file-like object
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = os.pipe()
elif is_int(stdout):
c2pwrite = stdout
else:
# Assuming file-like object
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == PIPE:
errread, errwrite = os.pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
elif is_int(stderr):
errwrite = stderr
else:
# Assuming file-like object
errwrite = stderr.fileno()
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _set_cloexec_flag(self, fd):
try:
cloexec_flag = fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag = 1
old = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
def _close_fds(self, but):
for i in range(3, MAXFD):
if i == but:
continue
try:
os.close(i)
except KeyboardInterrupt:
raise # SCons: don't swallow keyboard interrupts
except:
pass
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (POSIX version)"""
if is_string(args):
args = [args]
if shell:
args = ["/bin/sh", "-c"] + args
if executable is None:
executable = args[0]
# For transferring possible exec failure from child to parent
# The first char specifies the exception type: 0 means
# OSError, 1 means some other error.
errpipe_read, errpipe_write = os.pipe()
self._set_cloexec_flag(errpipe_write)
self.pid = os.fork()
self._child_created = True
if self.pid == 0:
# Child
try:
# Close parent's pipe ends
if p2cwrite:
os.close(p2cwrite)
if c2pread:
os.close(c2pread)
if errread:
os.close(errread)
os.close(errpipe_read)
# Dup fds for child
if p2cread:
os.dup2(p2cread, 0)
if c2pwrite:
os.dup2(c2pwrite, 1)
if errwrite:
os.dup2(errwrite, 2)
# Close pipe fds. Make sure we don't close the same
# fd more than once, or standard fds.
try:
set
except NameError:
# Fall-back for earlier Python versions, so epydoc
# can use this module directly to execute things.
if p2cread:
os.close(p2cread)
if c2pwrite and c2pwrite not in (p2cread,):
os.close(c2pwrite)
if errwrite and errwrite not in (p2cread, c2pwrite):
os.close(errwrite)
else:
for fd in set((p2cread, c2pwrite, errwrite))-set((0,1,2)):
if fd: os.close(fd)
# Close all other fds, if asked for
if close_fds:
self._close_fds(but=errpipe_write)
if cwd is not None:
os.chdir(cwd)
if preexec_fn:
apply(preexec_fn)
if env is None:
os.execvp(executable, args)
else:
os.execvpe(executable, args, env)
except KeyboardInterrupt:
raise # SCons: don't swallow keyboard interrupts
except:
exc_type, exc_value, tb = sys.exc_info()
# Save the traceback and attach it to the exception object
exc_lines = traceback.format_exception(exc_type,
exc_value,
tb)
exc_value.child_traceback = ''.join(exc_lines)
os.write(errpipe_write, pickle.dumps(exc_value))
# This exitcode won't be reported to applications, so it
# really doesn't matter what we return.
os._exit(255)
# Parent
os.close(errpipe_write)
if p2cread and p2cwrite:
os.close(p2cread)
if c2pwrite and c2pread:
os.close(c2pwrite)
if errwrite and errread:
os.close(errwrite)
# Wait for exec to fail or succeed; possibly raising exception
data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB
os.close(errpipe_read)
if data != "":
os.waitpid(self.pid, 0)
child_exception = pickle.loads(data)
raise child_exception
def _handle_exitstatus(self, sts):
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
elif os.WIFEXITED(sts):
self.returncode = os.WEXITSTATUS(sts)
else:
# Should never happen
raise RuntimeError("Unknown child exit status!")
def poll(self, _deadstate=None):
"""Check if child process has terminated. Returns returncode
attribute."""
if self.returncode is None:
try:
pid, sts = os.waitpid(self.pid, os.WNOHANG)
if pid == self.pid:
self._handle_exitstatus(sts)
except os.error:
if _deadstate is not None:
self.returncode = _deadstate
return self.returncode
def wait(self):
"""Wait for child process to terminate. Returns returncode
attribute."""
if self.returncode is None:
pid, sts = os.waitpid(self.pid, 0)
self._handle_exitstatus(sts)
return self.returncode
def _communicate(self, input):
read_set = []
write_set = []
stdout = None # Return
stderr = None # Return
if self.stdin:
# Flush stdio buffer. This might block, if the user has
# been writing to .stdin in an uncontrolled fashion.
self.stdin.flush()
if input:
write_set.append(self.stdin)
else:
self.stdin.close()
if self.stdout:
read_set.append(self.stdout)
stdout = []
if self.stderr:
read_set.append(self.stderr)
stderr = []
input_offset = 0
while read_set or write_set:
rlist, wlist, xlist = select.select(read_set, write_set, [])
if self.stdin in wlist:
# When select has indicated that the file is writable,
                    # we can write up to PIPE_BUF bytes without risk of
                    # blocking. POSIX defines PIPE_BUF >= 512.
m = memoryview(input)[input_offset:input_offset+512]
bytes_written = os.write(self.stdin.fileno(), m)
input_offset = input_offset + bytes_written
if input_offset >= len(input):
self.stdin.close()
write_set.remove(self.stdin)
if self.stdout in rlist:
data = os.read(self.stdout.fileno(), 1024)
if data == "":
self.stdout.close()
read_set.remove(self.stdout)
stdout.append(data)
if self.stderr in rlist:
data = os.read(self.stderr.fileno(), 1024)
if data == "":
self.stderr.close()
read_set.remove(self.stderr)
stderr.append(data)
# All data exchanged. Translate lists into strings.
if stdout is not None:
stdout = ''.join(stdout)
if stderr is not None:
stderr = ''.join(stderr)
# Translate newlines, if requested. We cannot let the file
# object do the translation: It is based on stdio, which is
# impossible to combine with select (unless forcing no
# buffering).
if self.universal_newlines and hasattr(file, 'newlines'):
if stdout:
stdout = self._translate_newlines(stdout)
if stderr:
stderr = self._translate_newlines(stderr)
self.wait()
return (stdout, stderr)
def _demo_posix():
#
# Example 1: Simple redirection: Get process list
#
plist = Popen(["ps"], stdout=PIPE).communicate()[0]
print "Process list:"
print plist
#
# Example 2: Change uid before executing child
#
if os.getuid() == 0:
p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
p.wait()
#
# Example 3: Connecting several subprocesses
#
print "Looking for 'hda'..."
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
print repr(p2.communicate()[0])
#
# Example 4: Catch execution error
#
print
print "Trying a weird file..."
try:
print Popen(["/this/path/does/not/exist"]).communicate()
except OSError, e:
if e.errno == errno.ENOENT:
print "The file didn't exist. I thought so..."
print "Child traceback:"
print e.child_traceback
else:
print "Error", e.errno
else:
sys.stderr.write( "Gosh. No error.\n" )
def _demo_windows():
#
# Example 1: Connecting several subprocesses
#
print "Looking for 'PROMPT' in set output..."
p1 = Popen("set", stdout=PIPE, shell=True)
p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
print repr(p2.communicate()[0])
#
# Example 2: Simple execution of program
#
print "Executing calc..."
p = Popen("calc")
p.wait()
if __name__ == "__main__":
if mswindows:
_demo_windows()
else:
_demo_posix()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
apache-2.0
|
pcabido/socorro
|
socorro/lib/httpclient.py
|
11
|
2346
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import httplib
class HttpClient(object):
"""Class for doing HTTP requests to any server. Encapsulate python's httplib.
"""
def __init__(self, host, port, timeout=None):
"""Set the host, port and optional timeout for all HTTP requests ran by
this client.
"""
self.host = host
self.port = port
self.timeout = timeout
def __enter__(self):
self.conn = httplib.HTTPConnection(self.host, self.port,
timeout=self.timeout)
def __exit__(self, type, value, traceback):
self.conn.close()
def _process_response(self):
"""Return a JSON result after an HTTP Request.
Process the response of an HTTP Request and make it a JSON error if
it failed. Otherwise return the response's content.
"""
response = self.conn.getresponse()
if response.status == 200 or response.status == 201:
data = response.read()
else:
data = {
"error": {
"code": response.status,
"reason": response.reason,
"data": response.read()
}
}
return data
def get(self, url):
"""Send a HTTP GET request to a URL and return the result.
"""
self.conn.request("GET", url)
return self._process_response()
def post(self, url, data):
"""Send a HTTP POST request to a URL and return the result.
"""
headers = {
"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/json"
}
self.conn.request("POST", url, data, headers)
return self._process_response()
def put(self, url, data=None):
"""Send a HTTP PUT request to a URL and return the result.
"""
self.conn.request("PUT", url, data)
return self._process_response()
def delete(self, url):
"""Send a HTTP DELETE request to a URL and return the result.
"""
self.conn.request("DELETE", url)
return self._process_response()
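# A minimal usage sketch (hypothetical host and URL). Because __enter__ opens
# the connection and __exit__ closes it, an HttpClient instance is driven as
# a context manager:
#
#   client = HttpClient('crash-stats.example.com', 80, timeout=10)
#   with client:
#       result = client.get('/api/status/')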
|
mpl-2.0
|
EdDev/vdsm
|
tests/throttledlog_test.py
|
1
|
3208
|
#
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import logging
from vdsm import throttledlog
from monkeypatch import MonkeyPatch
from testlib import VdsmTestCase
class FakeLogger(object):
def __init__(self, level):
self.level = level
self.messages = []
def isEnabledFor(self, level):
return level >= self.level
def log(self, level, message, *args):
if not self.isEnabledFor(level):
return
self.messages.append(message % args)
class FakeTime(object):
def __init__(self):
self.time = 0.0
def __call__(self):
return self.time
class TestThrottledLogging(VdsmTestCase):
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_throttled_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 3'])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.INFO))
def test_no_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages, [])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_default(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('other', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in range(5)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
@MonkeyPatch(throttledlog, "monotonic_time", FakeTime())
def test_timeout(self):
throttledlog.throttle('test', 10, timeout=7)
for i in range(12):
throttledlog.debug('test', "Cycle: %s", i)
throttledlog.monotonic_time.time += 1.0
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in (0, 7, 10,)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.WARNING))
def test_logging_warning(self):
throttledlog.throttle('test', 4)
for i in range(7):
throttledlog.warning('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 4'])
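# For reference, a typical call site of the API exercised above would look
# like this (hypothetical names): log every 100th 'stats' message, or earlier
# once 60 seconds have passed since the last logged one:
#
#   throttledlog.throttle('stats', 100, timeout=60)
#   throttledlog.debug('stats', 'sampled value: %s', value)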
|
gpl-2.0
|
js0701/chromium-crosswalk
|
chrome/test/data/safe_browsing/safe_browsing_testserver.py
|
50
|
1495
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wraps the upstream safebrowsing_test_server.py to run in Chrome tests."""
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(BASE_DIR, '..', '..', '..', '..', 'net',
'tools', 'testserver'))
import testserver_base
class ServerRunner(testserver_base.TestServerRunner):
"""TestServerRunner for safebrowsing_test_server.py."""
def create_server(self, server_data):
sys.path.append(os.path.join(BASE_DIR, '..', '..', '..', '..',
'third_party', 'safe_browsing', 'testing'))
import safebrowsing_test_server
server = safebrowsing_test_server.SetupServer(
self.options.data_file, self.options.host, self.options.port,
opt_enforce_caching=False, opt_validate_database=True)
print 'Safebrowsing HTTP server started on port %d...' % server.server_port
server_data['port'] = server.server_port
return server
def add_options(self):
testserver_base.TestServerRunner.add_options(self)
self.option_parser.add_option('--data-file', dest='data_file',
help='File containing safebrowsing test '
'data and expectations')
if __name__ == '__main__':
sys.exit(ServerRunner().main())
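# Typical invocation (illustrative; the option values are hypothetical, and
# --port is assumed to be provided by testserver_base's shared options):
#   safe_browsing_testserver.py --port=8001 --data-file=testdata_file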
|
bsd-3-clause
|
Glasgow2015/team-10
|
env/lib/python2.7/site-packages/django/core/management/commands/createcachetable.py
|
96
|
3927
|
from django.conf import settings
from django.core.cache import caches
from django.core.cache.backends.db import BaseDatabaseCache
from django.core.management.base import BaseCommand, CommandError
from django.db import (
DEFAULT_DB_ALIAS, connections, models, router, transaction,
)
from django.db.utils import DatabaseError
from django.utils.encoding import force_text
class Command(BaseCommand):
help = "Creates the tables needed to use the SQL cache backend."
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument('args', metavar='table_name', nargs='*',
help='Optional table names. Otherwise, settings.CACHES is used to '
'find cache tables.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a database onto which the cache tables will be '
'installed. Defaults to the "default" database.')
def handle(self, *tablenames, **options):
db = options.get('database')
self.verbosity = int(options.get('verbosity'))
if len(tablenames):
# Legacy behavior, tablename specified as argument
for tablename in tablenames:
self.create_table(db, tablename)
else:
for cache_alias in settings.CACHES:
cache = caches[cache_alias]
if isinstance(cache, BaseDatabaseCache):
self.create_table(db, cache._table)
def create_table(self, database, tablename):
cache = BaseDatabaseCache(tablename, {})
if not router.allow_migrate_model(database, cache.cache_model_class):
return
connection = connections[database]
if tablename in connection.introspection.table_names():
if self.verbosity > 0:
self.stdout.write("Cache table '%s' already exists." % tablename)
return
fields = (
# "key" is a reserved word in MySQL, so use "cache_key" instead.
models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
models.TextField(name='value'),
models.DateTimeField(name='expires', db_index=True),
)
table_output = []
index_output = []
qn = connection.ops.quote_name
for f in fields:
field_output = [qn(f.name), f.db_type(connection=connection)]
field_output.append("%sNULL" % ("NOT " if not f.null else ""))
if f.primary_key:
field_output.append("PRIMARY KEY")
elif f.unique:
field_output.append("UNIQUE")
if f.db_index:
unique = "UNIQUE " if f.unique else ""
index_output.append("CREATE %sINDEX %s ON %s (%s);" %
(unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
qn(f.name)))
table_output.append(" ".join(field_output))
full_statement = ["CREATE TABLE %s (" % qn(tablename)]
for i, line in enumerate(table_output):
full_statement.append(' %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
full_statement.append(');')
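        # Illustrative shape of the resulting DDL (column types and quoting
        # are backend-specific; 'my_cache_table' is a hypothetical name):
        #   CREATE TABLE "my_cache_table" (
        #       "cache_key" varchar(255) NOT NULL PRIMARY KEY,
        #       "value" text NOT NULL,
        #       "expires" datetime NOT NULL
        #   );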
with transaction.atomic(using=database,
savepoint=connection.features.can_rollback_ddl):
with connection.cursor() as curs:
try:
curs.execute("\n".join(full_statement))
except DatabaseError as e:
raise CommandError(
"Cache table '%s' could not be created.\nThe error was: %s." %
(tablename, force_text(e)))
for statement in index_output:
curs.execute(statement)
if self.verbosity > 1:
self.stdout.write("Cache table '%s' created." % tablename)
|
apache-2.0
|
akaariai/django
|
tests/view_tests/tests/test_csrf.py
|
23
|
3190
|
from django.test import Client, TestCase, override_settings
from django.utils.translation import override
@override_settings(ROOT_URLCONF="view_tests.urls")
class CsrfViewTests(TestCase):
def setUp(self):
super(CsrfViewTests, self).setUp()
self.client = Client(enforce_csrf_checks=True)
@override_settings(
USE_I18N=True,
MIDDLEWARE_CLASSES=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
],
)
def test_translation(self):
"""
Test that an invalid request is rejected with a localized error message.
"""
response = self.client.post('/')
self.assertContains(response, "Forbidden", status_code=403)
self.assertContains(response,
"CSRF verification failed. Request aborted.",
status_code=403)
with self.settings(LANGUAGE_CODE='nl'), override('en-us'):
response = self.client.post('/')
self.assertContains(response, "Verboden", status_code=403)
self.assertContains(response,
"CSRF-verificatie mislukt. Verzoek afgebroken.",
status_code=403)
@override_settings(
SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')
)
def test_no_referer(self):
"""
Referer header is strictly checked for POST over HTTPS. Trigger the
exception by sending an incorrect referer.
"""
response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https')
self.assertContains(response,
"You are seeing this message because this HTTPS "
"site requires a 'Referer header' to be "
"sent by your Web browser, but none was sent.",
status_code=403)
def test_no_cookies(self):
"""
The CSRF cookie is checked for POST. Failure to send this cookie should
provide a nice error message.
"""
response = self.client.post('/')
self.assertContains(response,
"You are seeing this message because this site "
"requires a CSRF cookie when submitting forms. "
"This cookie is required for security reasons, to "
"ensure that your browser is not being hijacked "
"by third parties.",
status_code=403)
# In Django 2.0, this can be changed to TEMPLATES=[] because the code path
# that reads the TEMPLATE_* settings in that case will have been removed.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
}])
def test_no_django_template_engine(self):
"""
The CSRF view doesn't depend on the TEMPLATES configuration (#24388).
"""
response = self.client.post('/')
self.assertContains(response, "Forbidden", status_code=403)
|
bsd-3-clause
|
zerkrx/zerkbox
|
lib/youtube_dl/extractor/niconico.py
|
17
|
10092
|
# coding: utf-8
from __future__ import unicode_literals
import re
import json
import datetime
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
)
from ..utils import (
ExtractorError,
int_or_none,
parse_duration,
parse_iso8601,
sanitized_Request,
xpath_text,
determine_ext,
urlencode_postdata,
)
class NiconicoIE(InfoExtractor):
IE_NAME = 'niconico'
IE_DESC = 'ニコニコ動画'
_TESTS = [{
'url': 'http://www.nicovideo.jp/watch/sm22312215',
'md5': 'd1a75c0823e2f629128c43e1212760f9',
'info_dict': {
'id': 'sm22312215',
'ext': 'mp4',
'title': 'Big Buck Bunny',
'uploader': 'takuya0301',
'uploader_id': '2698420',
'upload_date': '20131123',
'timestamp': 1385182762,
'description': '(c) copyright 2008, Blender Foundation / www.bigbuckbunny.org',
'duration': 33,
},
'skip': 'Requires an account',
}, {
        # Files downloaded with and without credentials are different, so omit
# the md5 field
'url': 'http://www.nicovideo.jp/watch/nm14296458',
'info_dict': {
'id': 'nm14296458',
'ext': 'swf',
'title': '【鏡音リン】Dance on media【オリジナル】take2!',
'description': 'md5:689f066d74610b3b22e0f1739add0f58',
'uploader': 'りょうた',
'uploader_id': '18822557',
'upload_date': '20110429',
'timestamp': 1304065916,
'duration': 209,
},
'skip': 'Requires an account',
}, {
        # video exists but is marked as "deleted"
# md5 is unstable
'url': 'http://www.nicovideo.jp/watch/sm10000',
'info_dict': {
'id': 'sm10000',
'ext': 'unknown_video',
'description': 'deleted',
'title': 'ドラえもんエターナル第3話「決戦第3新東京市」<前編>',
'upload_date': '20071224',
'timestamp': int, # timestamp field has different value if logged in
'duration': 304,
},
'skip': 'Requires an account',
}, {
'url': 'http://www.nicovideo.jp/watch/so22543406',
'info_dict': {
'id': '1388129933',
'ext': 'mp4',
'title': '【第1回】RADIOアニメロミックス ラブライブ!~のぞえりRadio Garden~',
'description': 'md5:b27d224bb0ff53d3c8269e9f8b561cf1',
'timestamp': 1388851200,
'upload_date': '20140104',
'uploader': 'アニメロチャンネル',
'uploader_id': '312',
},
'skip': 'The viewing period of the video you were searching for has expired.',
}]
_VALID_URL = r'https?://(?:www\.|secure\.)?nicovideo\.jp/watch/(?P<id>(?:[a-z]{2})?[0-9]+)'
_NETRC_MACHINE = 'niconico'
def _real_initialize(self):
self._login()
def _login(self):
(username, password) = self._get_login_info()
# No authentication to be performed
if not username:
return True
# Log in
login_form_strs = {
'mail': username,
'password': password,
}
login_data = urlencode_postdata(login_form_strs)
request = sanitized_Request(
'https://secure.nicovideo.jp/secure/login', login_data)
login_results = self._download_webpage(
request, None, note='Logging in', errnote='Unable to log in')
if re.search(r'(?i)<h1 class="mb8p4">Log in error</h1>', login_results) is not None:
self._downloader.report_warning('unable to log in: bad username or password')
return False
return True
def _real_extract(self, url):
video_id = self._match_id(url)
# Get video webpage. We are not actually interested in it for normal
# cases, but need the cookies in order to be able to download the
# info webpage
webpage, handle = self._download_webpage_handle(
'http://www.nicovideo.jp/watch/' + video_id, video_id)
if video_id.startswith('so'):
video_id = self._match_id(handle.geturl())
video_info = self._download_xml(
'http://ext.nicovideo.jp/api/getthumbinfo/' + video_id, video_id,
note='Downloading video info page')
# Get flv info
flv_info_webpage = self._download_webpage(
'http://flapi.nicovideo.jp/api/getflv/' + video_id + '?as3=1',
video_id, 'Downloading flv info')
flv_info = compat_urlparse.parse_qs(flv_info_webpage)
if 'url' not in flv_info:
if 'deleted' in flv_info:
raise ExtractorError('The video has been deleted.',
expected=True)
elif 'closed' in flv_info:
raise ExtractorError('Niconico videos now require logging in',
expected=True)
else:
raise ExtractorError('Unable to find video URL')
video_real_url = flv_info['url'][0]
# Start extracting information
title = xpath_text(video_info, './/title')
if not title:
title = self._og_search_title(webpage, default=None)
if not title:
title = self._html_search_regex(
r'<span[^>]+class="videoHeaderTitle"[^>]*>([^<]+)</span>',
webpage, 'video title')
watch_api_data_string = self._html_search_regex(
r'<div[^>]+id="watchAPIDataContainer"[^>]+>([^<]+)</div>',
webpage, 'watch api data', default=None)
watch_api_data = self._parse_json(watch_api_data_string, video_id) if watch_api_data_string else {}
video_detail = watch_api_data.get('videoDetail', {})
extension = xpath_text(video_info, './/movie_type')
if not extension:
extension = determine_ext(video_real_url)
thumbnail = (
xpath_text(video_info, './/thumbnail_url') or
self._html_search_meta('image', webpage, 'thumbnail', default=None) or
video_detail.get('thumbnail'))
description = xpath_text(video_info, './/description')
timestamp = parse_iso8601(xpath_text(video_info, './/first_retrieve'))
if not timestamp:
match = self._html_search_meta('datePublished', webpage, 'date published', default=None)
if match:
timestamp = parse_iso8601(match.replace('+', ':00+'))
if not timestamp and video_detail.get('postedAt'):
timestamp = parse_iso8601(
video_detail['postedAt'].replace('/', '-'),
delimiter=' ', timezone=datetime.timedelta(hours=9))
view_count = int_or_none(xpath_text(video_info, './/view_counter'))
if not view_count:
match = self._html_search_regex(
r'>Views: <strong[^>]*>([^<]+)</strong>',
webpage, 'view count', default=None)
if match:
view_count = int_or_none(match.replace(',', ''))
view_count = view_count or video_detail.get('viewCount')
comment_count = int_or_none(xpath_text(video_info, './/comment_num'))
if not comment_count:
match = self._html_search_regex(
r'>Comments: <strong[^>]*>([^<]+)</strong>',
webpage, 'comment count', default=None)
if match:
comment_count = int_or_none(match.replace(',', ''))
comment_count = comment_count or video_detail.get('commentCount')
duration = (parse_duration(
xpath_text(video_info, './/length') or
self._html_search_meta(
'video:duration', webpage, 'video duration', default=None)) or
video_detail.get('length'))
webpage_url = xpath_text(video_info, './/watch_url') or url
if video_info.find('.//ch_id') is not None:
uploader_id = video_info.find('.//ch_id').text
uploader = video_info.find('.//ch_name').text
elif video_info.find('.//user_id') is not None:
uploader_id = video_info.find('.//user_id').text
uploader = video_info.find('.//user_nickname').text
else:
uploader_id = uploader = None
return {
'id': video_id,
'url': video_real_url,
'title': title,
'ext': extension,
'format_id': 'economy' if video_real_url.endswith('low') else 'normal',
'thumbnail': thumbnail,
'description': description,
'uploader': uploader,
'timestamp': timestamp,
'uploader_id': uploader_id,
'view_count': view_count,
'comment_count': comment_count,
'duration': duration,
'webpage_url': webpage_url,
}
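# A note on the getflv handling above: the endpoint returns a URL-encoded
# key/value body, so compat_urlparse.parse_qs() produces a dict of lists.
# A rough illustration (placeholder values, not real API output):
#
#   >>> compat_urlparse.parse_qs('thread_id=123&url=http%3A%2F%2Fexample')
#   {'thread_id': ['123'], 'url': ['http://example']}
#
# which is why the extractor reads flv_info['url'][0].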
class NiconicoPlaylistIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?nicovideo\.jp/mylist/(?P<id>\d+)'
_TEST = {
'url': 'http://www.nicovideo.jp/mylist/27411728',
'info_dict': {
'id': '27411728',
'title': 'AKB48のオールナイトニッポン',
},
'playlist_mincount': 225,
}
def _real_extract(self, url):
list_id = self._match_id(url)
webpage = self._download_webpage(url, list_id)
entries_json = self._search_regex(r'Mylist\.preload\(\d+, (\[.*\])\);',
webpage, 'entries')
entries = json.loads(entries_json)
entries = [{
'_type': 'url',
'ie_key': NiconicoIE.ie_key(),
'url': ('http://www.nicovideo.jp/watch/%s' %
entry['item_data']['video_id']),
} for entry in entries]
return {
'_type': 'playlist',
'title': self._search_regex(r'\s+name: "(.*?)"', webpage, 'title'),
'id': list_id,
'entries': entries,
}
|
gpl-3.0
|
ravibhure/ansible
|
lib/ansible/modules/remote_management/ucs/ucs_vhba_template.py
|
50
|
11155
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: ucs_vhba_template
short_description: Configures vHBA templates on Cisco UCS Manager
description:
- Configures vHBA templates on Cisco UCS Manager.
- Examples can be used with the UCS Platform Emulator U(https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify vHBA templates are present and will create if needed.
- If C(absent), will verify vHBA templates are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the virtual HBA template.
- This name can be between 1 and 16 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the template is created.
required: yes
description:
description:
- A user-defined description of the template.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
fabric:
description:
- The Fabric ID field.
- The name of the fabric interconnect that vHBAs created with this template are associated with.
choices: [A, B]
default: A
redundancy_type:
description:
- The Redundancy Type used for template pairing from the Primary or Secondary redundancy template.
- "primary — Creates configurations that can be shared with the Secondary template."
- Any other shared changes on the Primary template are automatically synchronized to the Secondary template.
- "secondary — All shared configurations are inherited from the Primary template."
- "none - Legacy vHBA template behavior. Select this option if you do not want to use redundancy."
choices: [none, primary, secondary]
default: none
vsan:
description:
- The VSAN to associate with vHBAs created from this template.
default: default
template_type:
description:
- The Template Type field.
- "This can be one of the following:"
- "initial-template — vHBAs created from this template are not updated if the template changes."
- "updating-template - vHBAs created from this template are updated if the template changes."
choices: [initial-template, updating-template]
default: initial-template
max_data:
description:
- The Max Data Field Size field.
- The maximum size, in bytes, of the Fibre Channel frame payload that the vHBA supports.
- Enter a string between '256' and '2112'.
default: '2048'
wwpn_pool:
description:
- The WWPN pool that a vHBA created from this template uses to derive its WWPN address.
default: default
qos_policy:
description:
- The QoS policy that is associated with vHBAs created from this template.
pin_group:
description:
- The SAN pin group that is associated with vHBAs created from this template.
stats_policy:
description:
- The statistics collection policy that is associated with vHBAs created from this template.
default: default
org_dn:
description:
- Org dn (distinguished name)
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure vHBA template
ucs_vhba_template:
hostname: 172.16.143.150
username: admin
password: password
name: vHBA-A
fabric: A
vsan: VSAN-A
wwpn_pool: WWPN-Pool-A
- name: Remove vHBA template
ucs_vhba_template:
hostname: 172.16.143.150
username: admin
password: password
name: vHBA-A
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
argument_spec = ucs_argument_spec
argument_spec.update(
org_dn=dict(type='str', default='org-root'),
name=dict(type='str'),
descr=dict(type='str'),
fabric=dict(type='str', default='A', choices=['A', 'B']),
redundancy_type=dict(type='str', default='none', choices=['none', 'primary', 'secondary']),
vsan=dict(type='str', default='default'),
template_type=dict(type='str', default='initial-template', choices=['initial-template', 'updating-template']),
max_data=dict(type='str', default='2048'),
wwpn_pool=dict(type='str', default='default'),
qos_policy=dict(type='str'),
pin_group=dict(type='str'),
stats_policy=dict(type='str', default='default'),
state=dict(type='str', default='present', choices=['present', 'absent']),
vhba_template_list=dict(type='list'),
)
# Note that use of vhba_template_list is an experimental feature which allows multiple resource updates with a single UCSM connection.
# Support for vhba_template_list may change or be removed once persistent UCS connections are supported.
# Either vhba_template_list or name is required (the user can specify either a list or a single resource).
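# Illustrative only (not part of the documented interface): a
# vhba_template_list payload from a playbook might look like:
#   vhba_template_list:
#     - name: vHBA-A
#       fabric: A
#       vsan: VSAN-A
#     - name: vHBA-B
#       fabric: B
#       vsan: VSAN-B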
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
required_one_of=[
['vhba_template_list', 'name']
],
mutually_exclusive=[
['vhba_template_list', 'name']
],
)
ucs = UCSModule(module)
err = False
from ucsmsdk.mometa.vnic.VnicSanConnTempl import VnicSanConnTempl
from ucsmsdk.mometa.vnic.VnicFcIf import VnicFcIf
changed = False
try:
# Only documented use is a single resource, but to also support experimental
# feature allowing multiple updates all params are converted to a vhba_template_list below.
if module.params['vhba_template_list']:
# directly use the list (single resource and list are mutually exclusive)
vhba_template_list = module.params['vhba_template_list']
else:
# single resource specified, create list from the current params
vhba_template_list = [module.params]
for vhba_template in vhba_template_list:
mo_exists = False
props_match = False
# set default params. Done here to set values for lists which can't be done in the argument_spec
if not vhba_template.get('descr'):
vhba_template['descr'] = ''
if not vhba_template.get('fabric'):
vhba_template['fabric'] = 'A'
if not vhba_template.get('redundancy_type'):
vhba_template['redundancy_type'] = 'none'
if not vhba_template.get('vsan'):
vhba_template['vsan'] = 'default'
if not vhba_template.get('template_type'):
vhba_template['template_type'] = 'initial-template'
if not vhba_template.get('max_data'):
vhba_template['max_data'] = '2048'
if not vhba_template.get('wwpn_pool'):
vhba_template['wwpn_pool'] = 'default'
if not vhba_template.get('qos_policy'):
vhba_template['qos_policy'] = ''
if not vhba_template.get('pin_group'):
vhba_template['pin_group'] = ''
if not vhba_template.get('stats_policy'):
vhba_template['stats_policy'] = 'default'
# dn is <org_dn>/san-conn-templ-<name>
dn = module.params['org_dn'] + '/san-conn-templ-' + vhba_template['name']
mo = ucs.login_handle.query_dn(dn)
if mo:
mo_exists = True
# check top-level mo props
kwargs = dict(descr=vhba_template['descr'])
kwargs['switch_id'] = vhba_template['fabric']
kwargs['redundancy_pair_type'] = vhba_template['redundancy_type']
kwargs['templ_type'] = vhba_template['template_type']
kwargs['max_data_field_size'] = vhba_template['max_data']
kwargs['ident_pool_name'] = vhba_template['wwpn_pool']
kwargs['qos_policy_name'] = vhba_template['qos_policy']
kwargs['pin_to_group_name'] = vhba_template['pin_group']
kwargs['stats_policy_name'] = vhba_template['stats_policy']
if (mo.check_prop_match(**kwargs)):
# top-level props match, check next level mo/props
child_dn = dn + '/if-default'
mo_1 = ucs.login_handle.query_dn(child_dn)
if mo_1:
kwargs = dict(name=vhba_template['vsan'])
if (mo_1.check_prop_match(**kwargs)):
props_match = True
if module.params['state'] == 'absent':
# mo must exist but all properties do not have to match
if mo_exists:
if not module.check_mode:
ucs.login_handle.remove_mo(mo)
ucs.login_handle.commit()
changed = True
else:
if not props_match:
if not module.check_mode:
# create if mo does not already exist
mo = VnicSanConnTempl(
parent_mo_or_dn=module.params['org_dn'],
name=vhba_template['name'],
descr=vhba_template['descr'],
switch_id=vhba_template['fabric'],
redundancy_pair_type=vhba_template['redundancy_type'],
templ_type=vhba_template['template_type'],
max_data_field_size=vhba_template['max_data'],
ident_pool_name=vhba_template['wwpn_pool'],
qos_policy_name=vhba_template['qos_policy'],
pin_to_group_name=vhba_template['pin_group'],
stats_policy_name=vhba_template['stats_policy'],
)
mo_1 = VnicFcIf(
parent_mo_or_dn=mo,
name=vhba_template['vsan'],
)
ucs.login_handle.add_mo(mo, True)
ucs.login_handle.commit()
changed = True
except Exception as e:
err = True
ucs.result['msg'] = "setup error: %s " % str(e)
ucs.result['changed'] = changed
if err:
module.fail_json(**ucs.result)
module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
chuckfairy/three.js
|
utils/exporters/blender/addons/io_three/exporter/_json.py
|
296
|
6976
|
import json
from .. import constants
ROUND = constants.DEFAULT_PRECISION
## THREE override function
def _json_floatstr(o):
if ROUND is not None:
o = round(o, ROUND)
return '%g' % o
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
str=str,
tuple=tuple,
):
'''
Overwrite json.encoder for Python 2.7 and above to not
assign each index of a list or tuple to its own row as
this is completely asinine behaviour
'''
## @THREE
# Override the function
_floatstr = _json_floatstr
if _indent is not None and not isinstance(_indent, str):
_indent = ' ' * _indent
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
## @THREEJS
# - block the moronic functionality that puts each
# index on its own line causing insane row counts
#if _indent is not None:
# _current_indent_level += 1
# newline_indent = '\n' + _indent * _current_indent_level
# separator = _item_separator + newline_indent
# buf += newline_indent
#else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, str):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.items()
for key, value in items:
if isinstance(key, str):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, int):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, str):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, int):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, str):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, int):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
# override the encoder
json.encoder._make_iterencode = _make_iterencode
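# A quick sketch of the override's effect (output approximate; assumes
# constants.DEFAULT_PRECISION is 6 and the json defaults where a non-None
# indent implies a ',' item separator):
#
#   >>> import json
#   >>> json.dumps({'vertices': [0.1234567, 1, 2]}, indent=4)
#   '{\n    "vertices": [0.123457,1,2]\n}'
#
# The dict is still pretty-printed, but the list stays on one row and floats
# are rounded through _json_floatstr.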
|
mit
|
dlazz/ansible
|
lib/ansible/module_utils/memset.py
|
64
|
5156
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2018, Simon Weald <[email protected]>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.urls import open_url, urllib_error
from ansible.module_utils.basic import json
class Response(object):
'''
Create a response object to mimic that of requests.
'''
def __init__(self):
self.content = None
self.status_code = None
def json(self):
return json.loads(self.content)
def memset_api_call(api_key, api_method, payload=None):
'''
Generic function which returns results back to calling function.
Requires an API key and an API method to assemble the API URL.
Returns response text to be analysed.
'''
# instantiate a response object
response = Response()
# if we've already started preloading the payload then copy it
# and use that, otherwise we need to instantiate it.
if payload is None:
payload = dict()
else:
payload = payload.copy()
# set some sane defaults
has_failed = False
msg = None
data = urlencode(payload)
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
api_uri_base = 'https://api.memset.com/v1/json/'
api_uri = '{0}{1}/' . format(api_uri_base, api_method)
try:
resp = open_url(api_uri, data=data, headers=headers, method="POST", force_basic_auth=True, url_username=api_key)
response.content = resp.read().decode('utf-8')
response.status_code = resp.getcode()
except urllib_error.HTTPError as e:
try:
errorcode = e.code
except AttributeError:
errorcode = None
has_failed = True
response.content = e.read().decode('utf8')
response.status_code = errorcode
if response.status_code is not None:
msg = "Memset API returned a {0} response ({1}, {2})." . format(response.status_code, response.json()['error_type'], response.json()['error'])
else:
msg = "Memset API returned an error ({0}, {1})." . format(response.json()['error_type'], response.json()['error'])
if msg is None:
msg = response.json()
return(has_failed, msg, response)
def check_zone_domain(data, domain):
'''
Returns true if domain already exists, and false if not.
'''
exists = False
if data.status_code in [201, 200]:
for zone_domain in data.json():
if zone_domain['domain'] == domain:
exists = True
return(exists)
def check_zone(data, name):
'''
Returns true if zone already exists, and false if not.
'''
counter = 0
exists = False
if data.status_code in [201, 200]:
for zone in data.json():
if zone['nickname'] == name:
counter += 1
if counter == 1:
exists = True
return(exists, counter)
def get_zone_id(zone_name, current_zones):
'''
Returns the zone's id if it exists and is unique
'''
zone_exists = False
zone_id, msg = None, None
zone_list = []
for zone in current_zones:
if zone['nickname'] == zone_name:
zone_list.append(zone['id'])
counter = len(zone_list)
if counter == 0:
msg = 'No matching zone found'
elif counter == 1:
zone_id = zone_list[0]
zone_exists = True
elif counter > 1:
zone_id = None
msg = 'Zone ID could not be returned as duplicate zone names were detected'
return(zone_exists, msg, counter, zone_id)
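# Example usage (illustrative; the key and method names are placeholders):
#
#   has_failed, msg, response = memset_api_call(
#       api_key='my-api-key', api_method='dns.zone_list')
#   if not has_failed:
#       for zone in response.json():
#           print(zone['nickname'])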
|
gpl-3.0
|
RBE-Avionik/skylines
|
config/default.py
|
3
|
1670
|
# -*- coding: utf-8 -*-
import os.path
here = os.path.abspath(os.path.dirname(__file__))
base = os.path.abspath(os.path.join(here, '..'))
DEBUG_TB_INTERCEPT_REDIRECTS = False
DEBUG = True
SECRET_KEY = 'skylines'
SMTP_SERVER = 'localhost'
EMAIL_FROM = 'SkyLines <[email protected]>'
"""
# Logging handlers (disabled in DEBUG mode)
file_handler = (
'INFO', 'RotatingFileHandler',
('/home/turbo/skylines.log', 'a', 10000, 4))
LOGGING_HANDLERS = [file_handler]
SENTRY_DSN = 'https://foo:[email protected]/appid'
"""
# This should probably be changed for a multi-threaded production server
CACHE_TYPE = 'simple'
SQLALCHEMY_DATABASE_URI = 'postgresql:///skylines'
SQLALCHEMY_TRACK_MODIFICATIONS = False
ASSETS_LOAD_DIR = os.path.join(base, 'skylines', 'frontend', 'static')
SKYLINES_FILES_PATH = os.path.join(base, 'htdocs', 'files')
SKYLINES_ELEVATION_PATH = os.path.join(base, 'htdocs', 'srtm')
SKYLINES_MAPSERVER_PATH = os.path.join(base, 'mapserver')
SKYLINES_TEMPORARY_DIR = '/tmp'
# how many entries should a list have?
SKYLINES_LISTS_DISPLAY_LENGTH = 50
SKYLINES_MAP_TILE_URL = 'https://www.skylines.aero/mapproxy'
BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
CELERYD_LOG_LEVEL = 'INFO'
# limits for AnalyseFlight
SKYLINES_ANALYSIS_ITER = 10e6 # iteration limit, should be around 10e6 to 50e6
SKYLINES_ANALYSIS_MEMORY = 256 # approx memory limit in MB
# List of airspace types to check for infringements
SKYLINES_AIRSPACE_CHECK = ('RESTRICT', 'DANGER', 'PROHIBITED', 'CTR',
'CLASSA', 'CLASSB', 'CLASSC', 'CLASSD',
'NOGLIDER', 'TMZ', 'MATZ')
|
agpl-3.0
|
tectronics/chimerascan
|
chimerascan/deprecated/generate_transcript_reads.py
|
6
|
3842
|
'''
Created on Jul 14, 2011
@author: mkiyer
'''
import os
import subprocess
import chimerascan.lib.config as config
from chimerascan.lib.feature import GeneFeature
from chimerascan.lib.seq import DNA_reverse_complement
from chimerascan import pysam
BASES_PER_LINE = 50
def split_seq(seq, chars_per_line):
pos = 0
newseq = []
while pos < len(seq):
if pos + chars_per_line > len(seq):
endpos = len(seq)
else:
endpos = pos + chars_per_line
newseq.append(seq[pos:endpos])
pos = endpos
return '\n'.join(newseq)
def bed12_to_fasta(gene_feature_file, reference_seq_file):
ref_fa = pysam.Fastafile(reference_seq_file)
for g in GeneFeature.parse(open(gene_feature_file)):
exon_seqs = []
error_occurred = False
for start, end in g.exons:
seq = ref_fa.fetch(g.chrom, start, end)
if not seq:
error_occurred = True
break
exon_seqs.append(seq)
if error_occurred:
continue
# make fasta record
seq = ''.join(exon_seqs)
if g.strand == '-':
seq = DNA_reverse_complement(seq)
# break seq onto multiple lines
seqlines = split_seq(seq, BASES_PER_LINE)
yield (">%s range=%s:%d-%d gene=%s strand=%s\n%s" %
(config.GENE_REF_PREFIX + g.tx_name, g.chrom, start, end, g.gene_name, g.strand, seqlines))
ref_fa.close()
def main():
from optparse import OptionParser
parser = OptionParser("usage: %prog [options] <index> <gene_features.txt> <outprefix>")
parser.add_option("--error-rate", dest="error_rate", type="float", default=0.020)
parser.add_option("-N", dest="num_reads", type="int",
default=100, metavar="N",
help="number of reads [default=%default]")
parser.add_option("--rlen", dest="rlen", type="int",
default=50, metavar="N",
help="read length [default=%default]")
parser.add_option("--isize", dest="isize", type="int",
default=200, metavar="N",
help="insert size [default=%default]")
parser.add_option("--isize-stdev", dest="isize_stdev", type="float",
default=20, metavar="N",
help="insert size standard deviation [defaul=%default]")
parser.add_option("--library", dest="library_type",
default="fr-unstranded",
help="library type [default=%default]")
parser.add_option("--wgsim-dir", dest="wgsim_dir",
default="", help="directory containing 'wgsim' tool "
"packaged with samtools [default=%default]")
options, args = parser.parse_args()
if len(args) < 2:
parser.error("Not enough input arguments")
# extract command line arguments
index_dir = args[0]
gene_feature_file = args[1]
out_prefix = args[2]
ref_fasta_file = os.path.join(index_dir, config.ALIGN_INDEX_FASTA_FILE)
# make FASTA from the gene features in the input
f = open("bubba", "w")
for fasta_seq in bed12_to_fasta(gene_feature_file, ref_fasta_file):
print >>f, fasta_seq
f.close()
# generate reads with 'wgsim'
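# With the defaults above, the call below is roughly equivalent to running:
#   wgsim -e 0.02 -d 200 -s 20.0 -N 100 -1 50 -2 50 -r 0.0 -R 0.0 \
#       bubba <outprefix>_1.fq <outprefix>_2.fq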
args = [os.path.join(options.wgsim_dir, "wgsim"),
"-e", options.error_rate,
"-d", options.isize,
"-s", options.isize_stdev,
"-N", options.num_reads,
"-1", options.rlen,
"-2", options.rlen,
"-r", 0.0,
"-R", 0.0,
"bubba",
out_prefix + "_1.fq",
out_prefix + "_2.fq"]
subprocess.call(map(str, args))
os.remove("bubba")
if __name__ == '__main__':
main()
|
gpl-3.0
|
Signbank/BSL-signbank
|
signbank/registration/migrations/0006_existing_users_must_agree_to_data_protection.py
|
1
|
5683
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
for t in orm.UserProfile.objects.all():
t.data_protection_agree = False
t.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'registration.registrationprofile': {
'Meta': {'object_name': 'RegistrationProfile'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'registration.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'auslan_user': ('django.db.models.fields.BooleanField', [], {}),
'australian': ('django.db.models.fields.BooleanField', [], {}),
'background': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '20'}),
'data_protection_agree': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'deaf': ('django.db.models.fields.BooleanField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'learned': ('django.db.models.fields.IntegerField', [], {}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'researcher_credentials': ('django.db.models.fields.TextField', [], {'default': "''"}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'schooltype': ('django.db.models.fields.IntegerField', [], {}),
'teachercomm': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'}),
'yob': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['registration']
symmetrical = True
|
bsd-3-clause
|
FuzzJunket/pelican-plugins
|
simple_footnotes/simple_footnotes.py
|
30
|
4327
|
from pelican import signals
import re
import html5lib
RAW_FOOTNOTE_CONTAINERS = ["code"]
def getText(node, recursive = False):
"""Get all the text associated with this node.
With recursive == True, all text from child nodes is retrieved."""
L = ['']
for n in node.childNodes:
if n.nodeType in (node.TEXT_NODE, node.CDATA_SECTION_NODE):
L.append(n.data)
else:
if not recursive:
return None
L.append(getText(n) )
return ''.join(L)
def parse_for_footnotes(article_generator):
for article in article_generator.articles:
if "[ref]" in article._content and "[/ref]" in article._content:
content = article._content.replace("[ref]", "<x-simple-footnote>").replace("[/ref]", "</x-simple-footnote>")
parser = html5lib.HTMLParser(tree=html5lib.getTreeBuilder("dom"))
dom = parser.parse(content)
endnotes = []
count = 0
for footnote in dom.getElementsByTagName("x-simple-footnote"):
pn = footnote
leavealone = False
while pn:
if pn.nodeName in RAW_FOOTNOTE_CONTAINERS:
leavealone = True
break
pn = pn.parentNode
if leavealone:
continue
count += 1
fnid = "sf-%s-%s" % (article.slug, count)
fnbackid = "%s-back" % (fnid,)
endnotes.append((footnote, fnid, fnbackid))
number = dom.createElement("sup")
number.setAttribute("id", fnbackid)
numbera = dom.createElement("a")
numbera.setAttribute("href", "#%s" % fnid)
numbera.setAttribute("class", "simple-footnote")
numbera.appendChild(dom.createTextNode(str(count)))
txt = getText(footnote, recursive=True).replace("\n", " ")
numbera.setAttribute("title", txt)
number.appendChild(numbera)
footnote.parentNode.insertBefore(number, footnote)
if endnotes:
ol = dom.createElement("ol")
ol.setAttribute("class", "simple-footnotes")
for e, fnid, fnbackid in endnotes:
li = dom.createElement("li")
li.setAttribute("id", fnid)
while e.firstChild:
li.appendChild(e.firstChild)
backlink = dom.createElement("a")
backlink.setAttribute("href", "#%s" % fnbackid)
backlink.setAttribute("class", "simple-footnote-back")
backlink.appendChild(dom.createTextNode(u'\u21a9'))
li.appendChild(dom.createTextNode(" "))
li.appendChild(backlink)
ol.appendChild(li)
e.parentNode.removeChild(e)
dom.getElementsByTagName("body")[0].appendChild(ol)
s = html5lib.serializer.htmlserializer.HTMLSerializer(omit_optional_tags=False, quote_attr_values=True)
output_generator = s.serialize(html5lib.treewalkers.getTreeWalker("dom")(dom.getElementsByTagName("body")[0]))
article._content = "".join(list(output_generator)).replace(
"<x-simple-footnote>", "[ref]").replace("</x-simple-footnote>", "[/ref]").replace(
"<body>", "").replace("</body>", "")
if False:
count = 0
endnotes = []
for f in footnotes:
count += 1
fnstr = '<a class="simple-footnote" name="%s-%s-back" href="#%s-%s"><sup>%s</a>' % (
article.slug, count, article.slug, count, count)
endstr = '<li id="%s-%s">%s <a href="#%s-%s-back">↑</a></li>' % (
article.slug, count, f[len("[ref]"):-len("[/ref]")], article.slug, count)
content = content.replace(f, fnstr)
endnotes.append(endstr)
content += '<h4>Footnotes</h4><ol class="simple-footnotes">%s</ul>' % ("\n".join(endnotes),)
article._content = content
def register():
signals.article_generator_finalized.connect(parse_for_footnotes)
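# Rough before/after sketch (markup shape approximate; ids depend on the
# article slug and footnote count):
#
#   Input:  Pelican is great.[ref]See the plugin docs.[/ref]
#   Output: Pelican is great.<sup id="sf-myslug-1-back"><a href="#sf-myslug-1"
#           class="simple-footnote" title="See the plugin docs.">1</a></sup>
#   ...plus an <ol class="simple-footnotes"> of numbered notes with
#   back-links appended to the end of the article body.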
|
agpl-3.0
|
TNT-Samuel/Coding-Projects
|
DNS Server/Source/Lib/ctypes/test/test_byteswap.py
|
32
|
11411
|
import sys, unittest, struct, math, ctypes
from binascii import hexlify
from ctypes import *
def bin(s):
return hexlify(memoryview(s)).decode().upper()
# Each *simple* type that supports different byte orders has an
# __ctype_be__ attribute that specifies the same type in BIG ENDIAN
# byte order, and a __ctype_le__ attribute that is the same type in
# LITTLE ENDIAN byte order.
#
# For Structures and Unions, these types are created on demand.
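# For instance, on a little-endian build (illustrative, byte layout only):
#
#   >>> bytes(c_int(1))                 # native order
#   b'\x01\x00\x00\x00'
#   >>> bytes(c_int.__ctype_be__(1))    # forced big-endian order
#   b'\x00\x00\x00\x01'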
class Test(unittest.TestCase):
@unittest.skip('test disabled')
def test_X(self):
print(sys.byteorder, file=sys.stderr)
for i in range(32):
bits = BITS()
setattr(bits, "i%s" % i, 1)
dump(bits)
def test_slots(self):
class BigPoint(BigEndianStructure):
__slots__ = ()
_fields_ = [("x", c_int), ("y", c_int)]
class LowPoint(LittleEndianStructure):
__slots__ = ()
_fields_ = [("x", c_int), ("y", c_int)]
big = BigPoint()
little = LowPoint()
big.x = 4
big.y = 2
little.x = 2
little.y = 4
with self.assertRaises(AttributeError):
big.z = 42
with self.assertRaises(AttributeError):
little.z = 24
def test_endian_short(self):
if sys.byteorder == "little":
self.assertIs(c_short.__ctype_le__, c_short)
self.assertIs(c_short.__ctype_be__.__ctype_le__, c_short)
else:
self.assertIs(c_short.__ctype_be__, c_short)
self.assertIs(c_short.__ctype_le__.__ctype_be__, c_short)
s = c_short.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_short.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
def test_endian_int(self):
if sys.byteorder == "little":
self.assertIs(c_int.__ctype_le__, c_int)
self.assertIs(c_int.__ctype_be__.__ctype_le__, c_int)
else:
self.assertIs(c_int.__ctype_be__, c_int)
self.assertIs(c_int.__ctype_le__.__ctype_be__, c_int)
s = c_int.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">i", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_int.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<i", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">I", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<I", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
def test_endian_longlong(self):
if sys.byteorder == "little":
self.assertIs(c_longlong.__ctype_le__, c_longlong)
self.assertIs(c_longlong.__ctype_be__.__ctype_le__, c_longlong)
else:
self.assertIs(c_longlong.__ctype_be__, c_longlong)
self.assertIs(c_longlong.__ctype_le__.__ctype_be__, c_longlong)
s = c_longlong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_longlong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">Q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<Q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
def test_endian_float(self):
if sys.byteorder == "little":
self.assertIs(c_float.__ctype_le__, c_float)
self.assertIs(c_float.__ctype_be__.__ctype_le__, c_float)
else:
self.assertIs(c_float.__ctype_be__, c_float)
self.assertIs(c_float.__ctype_le__.__ctype_be__, c_float)
s = c_float(math.pi)
self.assertEqual(bin(struct.pack("f", math.pi)), bin(s))
# Hm, what's the precision of a float compared to a double?
self.assertAlmostEqual(s.value, math.pi, places=6)
s = c_float.__ctype_le__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack("<f", math.pi)), bin(s))
s = c_float.__ctype_be__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack(">f", math.pi)), bin(s))
def test_endian_double(self):
if sys.byteorder == "little":
self.assertIs(c_double.__ctype_le__, c_double)
self.assertIs(c_double.__ctype_be__.__ctype_le__, c_double)
else:
self.assertIs(c_double.__ctype_be__, c_double)
self.assertIs(c_double.__ctype_le__.__ctype_be__, c_double)
s = c_double(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("d", math.pi)), bin(s))
s = c_double.__ctype_le__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("<d", math.pi)), bin(s))
s = c_double.__ctype_be__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack(">d", math.pi)), bin(s))
def test_endian_other(self):
self.assertIs(c_byte.__ctype_le__, c_byte)
self.assertIs(c_byte.__ctype_be__, c_byte)
self.assertIs(c_ubyte.__ctype_le__, c_ubyte)
self.assertIs(c_ubyte.__ctype_be__, c_ubyte)
self.assertIs(c_char.__ctype_le__, c_char)
self.assertIs(c_char.__ctype_be__, c_char)
def test_struct_fields_1(self):
if sys.byteorder == "little":
base = BigEndianStructure
else:
base = LittleEndianStructure
class T(base):
pass
_fields_ = [("a", c_ubyte),
("b", c_byte),
("c", c_short),
("d", c_ushort),
("e", c_int),
("f", c_uint),
("g", c_long),
("h", c_ulong),
("i", c_longlong),
("k", c_ulonglong),
("l", c_float),
("m", c_double),
("n", c_char),
("b1", c_byte, 3),
("b2", c_byte, 3),
("b3", c_byte, 2),
("a", c_int * 3 * 3 * 3)]
T._fields_ = _fields_
# these fields do not support different byte order:
for typ in c_wchar, c_void_p, POINTER(c_int):
_fields_.append(("x", typ))
class T(base):
pass
self.assertRaises(TypeError, setattr, T, "_fields_", [("x", typ)])
def test_struct_struct(self):
# nested structures with different byteorders
# create nested structures with given byteorders and set memory to data
for nested, data in (
(BigEndianStructure, b'\0\0\0\1\0\0\0\2'),
(LittleEndianStructure, b'\1\0\0\0\2\0\0\0'),
):
for parent in (
BigEndianStructure,
LittleEndianStructure,
Structure,
):
class NestedStructure(nested):
_fields_ = [("x", c_uint32),
("y", c_uint32)]
class TestStructure(parent):
_fields_ = [("point", NestedStructure)]
self.assertEqual(len(data), sizeof(TestStructure))
ptr = POINTER(TestStructure)
s = cast(data, ptr)[0]
del ctypes._pointer_type_cache[TestStructure]
self.assertEqual(s.point.x, 1)
self.assertEqual(s.point.y, 2)
def test_struct_fields_2(self):
# standard packing in struct uses no alignment.
# So, we have to align using pad bytes.
#
# Unaligned accesses will crash Python (on those platforms that
# don't allow it, like sparc solaris).
if sys.byteorder == "little":
base = BigEndianStructure
fmt = ">bxhid"
else:
base = LittleEndianStructure
fmt = "<bxhid"
class S(base):
_fields_ = [("b", c_byte),
("h", c_short),
("i", c_int),
("d", c_double)]
s1 = S(0x12, 0x1234, 0x12345678, 3.14)
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
def test_unaligned_nonnative_struct_fields(self):
if sys.byteorder == "little":
base = BigEndianStructure
fmt = ">b h xi xd"
else:
base = LittleEndianStructure
fmt = "<b h xi xd"
class S(base):
_pack_ = 1
_fields_ = [("b", c_byte),
("h", c_short),
("_1", c_byte),
("i", c_int),
("_2", c_byte),
("d", c_double)]
s1 = S()
s1.b = 0x12
s1.h = 0x1234
s1.i = 0x12345678
s1.d = 3.14
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
def test_unaligned_native_struct_fields(self):
if sys.byteorder == "little":
fmt = "<b h xi xd"
else:
fmt = ">b h xi xd"
class S(Structure):
_pack_ = 1
_fields_ = [("b", c_byte),
("h", c_short),
("_1", c_byte),
("i", c_int),
("_2", c_byte),
("d", c_double)]
s1 = S()
s1.b = 0x12
s1.h = 0x1234
s1.i = 0x12345678
s1.d = 3.14
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
|
rdhyee/osf.io
|
admin_tests/nodes/test_views.py
|
18
|
5243
|
from django.test import RequestFactory
from nose import tools as nt
import mock
from admin.common_auth.logs import OSFLogEntry
from tests.base import AdminTestCase
from tests.factories import ProjectFactory, AuthUserFactory
from admin_tests.utilities import setup_view, setup_log_view
from admin.nodes.views import (
NodeView,
NodeRemoveContributorView,
NodeDeleteView
)
from website.project.model import NodeLog, Node
from framework.auth import User
class TestNodeView(AdminTestCase):
def test_no_guid(self):
request = RequestFactory().get('/fake_path')
view = NodeView()
view = setup_view(view, request)
with nt.assert_raises(AttributeError):
view.get_object()
def test_load_data(self):
node = ProjectFactory()
guid = node._id
request = RequestFactory().get('/fake_path')
view = NodeView()
view = setup_view(view, request, guid=guid)
res = view.get_object()
nt.assert_is_instance(res, dict)
def test_name_data(self):
node = ProjectFactory()
guid = node._id
request = RequestFactory().get('/fake_path')
view = NodeView()
view = setup_view(view, request, guid=guid)
temp_object = view.get_object()
view.object = temp_object
res = view.get_context_data()
nt.assert_equal(res[NodeView.context_object_name], temp_object)
class TestNodeDeleteView(AdminTestCase):
def setUp(self):
super(TestNodeDeleteView, self).setUp()
self.node = ProjectFactory()
self.request = RequestFactory().post('/fake_path')
self.view = NodeDeleteView()
self.view = setup_log_view(self.view, self.request,
guid=self.node._id)
def test_get_object(self):
obj = self.view.get_object()
nt.assert_is_instance(obj, Node)
def test_get_context(self):
res = self.view.get_context_data(object=self.node)
nt.assert_in('guid', res)
nt.assert_equal(res.get('guid'), self.node._id)
def test_remove_node(self):
count = OSFLogEntry.objects.count()
self.view.delete(self.request)
self.node.reload()
nt.assert_true(self.node.is_deleted)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
def test_restore_node(self):
self.view.delete(self.request)
nt.assert_true(self.node.is_deleted)
count = OSFLogEntry.objects.count()
self.view.delete(self.request)
self.node.reload()
nt.assert_false(self.node.is_deleted)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
class TestRemoveContributor(AdminTestCase):
def setUp(self):
super(TestRemoveContributor, self).setUp()
self.user = AuthUserFactory()
self.node = ProjectFactory(creator=self.user)
self.user_2 = AuthUserFactory()
self.node.add_contributor(self.user_2)
self.node.save()
self.view = NodeRemoveContributorView()
self.request = RequestFactory().post('/fake_path')
def test_get_object(self):
view = setup_log_view(self.view, self.request, node_id=self.node._id,
user_id=self.user._id)
node, user = view.get_object()
nt.assert_is_instance(node, Node)
nt.assert_is_instance(user, User)
@mock.patch('admin.nodes.views.Node.remove_contributor')
def test_remove_contributor(self, mock_remove_contributor):
user_id = self.user_2._id
node_id = self.node._id
view = setup_log_view(self.view, self.request, node_id=node_id,
user_id=user_id)
view.delete(self.request)
mock_remove_contributor.assert_called_with(self.user_2, None, log=False)
def test_integration_remove_contributor(self):
nt.assert_in(self.user_2, self.node.contributors)
view = setup_log_view(self.view, self.request, node_id=self.node._id,
user_id=self.user_2._id)
count = OSFLogEntry.objects.count()
view.delete(self.request)
nt.assert_not_in(self.user_2, self.node.contributors)
nt.assert_equal(OSFLogEntry.objects.count(), count + 1)
def test_do_not_remove_last_admin(self):
nt.assert_equal(
len(list(self.node.get_admin_contributors(self.node.contributors))),
1
)
view = setup_log_view(self.view, self.request, node_id=self.node._id,
user_id=self.user._id)
count = OSFLogEntry.objects.count()
view.delete(self.request)
self.node.reload() # Reloads instance to show that nothing was removed
nt.assert_equal(len(list(self.node.contributors)), 2)
nt.assert_equal(
len(list(self.node.get_admin_contributors(self.node.contributors))),
1
)
nt.assert_equal(OSFLogEntry.objects.count(), count)
def test_no_log(self):
view = setup_log_view(self.view, self.request, node_id=self.node._id,
user_id=self.user_2._id)
view.delete(self.request)
nt.assert_not_equal(self.node.logs[-1].action, NodeLog.CONTRIB_REMOVED)
|
apache-2.0
|
chisimba/modules
|
geonames/resources/loadgeodata.py
|
1
|
8244
|
import fileinput
import time
import base64
import xmlrpclib
import os
import random
import sys
import zipfile
import os.path
#Define some variables (Change these to suit your circumstances!)
SERV = 'http://127.0.0.1/'
UNAME = 'admin'
PWORD = 'a'
APIENDPOINT = '/app/index.php?module=api'
# -- END EDITABLE REGION -- Please do not attempt to edit below this line, you will break the system!
def unzip_file_into_dir(file, dir):
if not os.path.exists(dir):
os.mkdir(dir, 0777)
zfobj = zipfile.ZipFile(file)
for name in zfobj.namelist():
outfile = open(os.path.join(dir, name), 'wb')
outfile.write(zfobj.read(name))
outfile.close()
def grabFiles():
filestoget = ["allCountries.zip", "alternateNames.zip", "userTags.zip", "admin1Codes.txt", "admin1CodesASCII.txt", "admin2Codes.txt", "countryInfo.txt", "featureCodes_en.txt", "iso-languagecodes.txt", "timeZones.txt"]
unzippables = ["allCountries.zip", "alternateNames.zip", "userTags.zip"]
# Get all the files first
for item in filestoget:
print "Downloading: "+item
os.system("wget http://download.geonames.org/export/dump/"+item)
print "Downloading triples"
os.system("wget http://download.geonames.org/all-geonames-rdf.zip")
print "All files downloaded!... Processing..."
# Unzip the zipballs
for item in unzippables:
print "Unzipping "+item
unzip_file_into_dir(item, '.')
unzip_file_into_dir("all-geonames-rdf.zip", '.')
print "Done! Uploading data to server..."
def doRDFRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.accept(encoded)
print result
except:
print "RPC FAILED"
sys.exit()
def doCountryRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loaddata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doAdmin1CodesRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadAdmin1data(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doAdmin1AsciiRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadAdmin1Asciidata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doAdmin2CodesRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadAdmin2data(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doAltnamesRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadAltnamesdata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doCountryInfoRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadCountryInfodata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doFeatureCodeRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadFeatureCodedata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doIsoLangCodeRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadIsoLangCodedata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doTimeZoneRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadTimeZonedata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
def doUserTagsRPC(line):
server_url = SERV+APIENDPOINT;
# Set up the server.
server = xmlrpclib.Server(server_url);
try:
encoded = base64.b64encode(line)
result = server.geordf.loadUserTagsdata(encoded,UNAME, PWORD)
return result
except:
print "RPC FAILED"
sys.exit()
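# The RPC wrappers above differ only in the server method they invoke; a
# generic dispatcher along these lines (a sketch, not used by main() below)
# could replace them:
#
# def do_rpc(method_name, line, authenticated=True):
#     server = xmlrpclib.Server(SERV + APIENDPOINT)
#     method = getattr(server.geordf, method_name)
#     encoded = base64.b64encode(line)
#     try:
#         if authenticated:
#             return method(encoded, UNAME, PWORD)
#         return method(encoded)
#     except:
#         print "RPC FAILED"
#         sys.exit()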
def main():
grabFiles()
count = 0
for line in fileinput.input(['allCountries.txt']):
count = count+1
print doCountryRPC(line)+": "+str(count)
print "Country data upload complete!"
#Now the admin1Codes
print "Starting to upload first level Admin codes..."
count = 0
for line in fileinput.input(['admin1Codes.txt']):
count = count+1
print doAdmin1CodesRPC(line)+": "+str(count)
print "First level Admin codes uploaded!"
#Now the admin1 ASCII Codes
print "Starting to upload first level Admin ASCII codes..."
count = 0
for line in fileinput.input(['admin1CodesASCII.txt']):
count = count+1
print doAdmin1AsciiRPC(line)+": "+str(count)
print "First level Admin ASCII codes uploaded!"
#Now the admin2Codes
print "Starting to upload second level Admin codes..."
count = 0
for line in fileinput.input(['admin2Codes.txt']):
count = count+1
print doAdmin2CodesRPC(line)+": "+str(count)
print "Second level Admin codes uploaded!"
#Now the alternate place names
print "Starting to upload alternate place names..."
count = 0
for line in fileinput.input(['alternateNames.txt']):
count = count+1
print doAltnamesRPC(line)+": "+str(count)
print "Alternate place names uploaded!"
#Now the Country info
print "Starting to upload country info..."
count = 0
for line in fileinput.input(['countryInfo.txt']):
count = count+1
print doCountryInfoRPC(line)+": "+str(count)
print "Country info uploaded!"
#Now the Feature codes
print "Starting to upload feature codes..."
count = 0
for line in fileinput.input(['featureCodes_en.txt']):
count = count+1
print doFeatureCodeRPC(line)+": "+str(count)
print "Feature codes uploaded!"
#Now the ISO Language codes
print "Starting to upload ISO language codes..."
count = 0
for line in fileinput.input(['iso-languagecodes.txt']):
count = count+1
print doIsoLangCodeRPC(line)+": "+str(count)
print "ISO language codes uploaded!"
#Now the timezones
print "Starting to upload time zone information..."
count = 0
for line in fileinput.input(['timeZones.txt']):
count = count+1
print doTimeZoneRPC(line)+": "+str(count)
print "Time zones uploaded!"
#Now the user tags
print "Starting to upload user tags..."
count = 0
for line in fileinput.input(['userTags.txt']):
count = count+1
print doUserTagsRPC(line)+": "+str(count)
print "User tags uploaded!"
#Lets finally do the RDF triples before we Geo-rize the whole database
print "Uploading Linked data..."
count = 0
for line in fileinput.input(['all-geonames-rdf.txt']):
count = count+1
print doRDFRPC(line)+": "+str(count)
print "RDF Triples uploaded!"
print "Complete!"
if __name__ == '__main__': main()
|
gpl-2.0
|
domenicosolazzo/philocademy
|
venv/src/node-v0.10.36/tools/gyp/pylib/gyp/generator/gypsh.py
|
2779
|
1665
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
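# e.g. generator_default_variables['PRODUCT_DIR'] ends up as '<(PRODUCT_DIR)',
# leaving the expansion intact for inspection inside the gypsh session.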
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)
|
mit
|
theguardian/JIRA-APPy
|
lib/tlslite/errors.py
|
2
|
6688
|
# Authors:
# Trevor Perrin
# Dave Baggett (Arcode Corporation) - Added TLSUnsupportedError.
#
# See the LICENSE file for legal information regarding use of this file.
"""Exception classes.
@sort: TLSError, TLSAbruptCloseError, TLSAlert, TLSLocalAlert, TLSRemoteAlert,
TLSAuthenticationError, TLSNoAuthenticationError, TLSAuthenticationTypeError,
TLSFingerprintError, TLSAuthorizationError, TLSValidationError, TLSFaultError,
TLSUnsupportedError
"""
import socket
from .constants import AlertDescription, AlertLevel
class TLSError(Exception):
"""Base class for all TLS Lite exceptions."""
def __str__(self):
""""At least print out the Exception time for str(...)."""
return repr(self)
class TLSClosedConnectionError(TLSError, socket.error):
"""An attempt was made to use the connection after it was closed."""
pass
class TLSAbruptCloseError(TLSError):
"""The socket was closed without a proper TLS shutdown.
The TLS specification mandates that an alert of some sort
must be sent before the underlying socket is closed. If the socket
is closed without this, it could signify that an attacker is trying
to truncate the connection. It could also signify a misbehaving
TLS implementation, or a random network failure.
"""
pass
class TLSAlert(TLSError):
"""A TLS alert has been signalled."""
pass
_descriptionStr = {\
AlertDescription.close_notify: "close_notify",\
AlertDescription.unexpected_message: "unexpected_message",\
AlertDescription.bad_record_mac: "bad_record_mac",\
AlertDescription.decryption_failed: "decryption_failed",\
AlertDescription.record_overflow: "record_overflow",\
AlertDescription.decompression_failure: "decompression_failure",\
AlertDescription.handshake_failure: "handshake_failure",\
AlertDescription.no_certificate: "no certificate",\
AlertDescription.bad_certificate: "bad_certificate",\
AlertDescription.unsupported_certificate: "unsupported_certificate",\
AlertDescription.certificate_revoked: "certificate_revoked",\
AlertDescription.certificate_expired: "certificate_expired",\
AlertDescription.certificate_unknown: "certificate_unknown",\
AlertDescription.illegal_parameter: "illegal_parameter",\
AlertDescription.unknown_ca: "unknown_ca",\
AlertDescription.access_denied: "access_denied",\
AlertDescription.decode_error: "decode_error",\
AlertDescription.decrypt_error: "decrypt_error",\
AlertDescription.export_restriction: "export_restriction",\
AlertDescription.protocol_version: "protocol_version",\
AlertDescription.insufficient_security: "insufficient_security",\
AlertDescription.internal_error: "internal_error",\
AlertDescription.inappropriate_fallback: "inappropriate_fallback",\
AlertDescription.user_canceled: "user_canceled",\
AlertDescription.no_renegotiation: "no_renegotiation",\
AlertDescription.unknown_psk_identity: "unknown_psk_identity"}
class TLSLocalAlert(TLSAlert):
"""A TLS alert has been signalled by the local implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
@type message: str
@ivar message: Description of what went wrong.
"""
def __init__(self, alert, message=None):
self.description = alert.description
self.level = alert.level
self.message = message
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr is None:
alertStr = str(self.description)
if self.message:
return alertStr + ": " + self.message
else:
return alertStr
class TLSRemoteAlert(TLSAlert):
"""A TLS alert has been signalled by the remote implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
"""
def __init__(self, alert):
self.description = alert.description
self.level = alert.level
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr is None:
alertStr = str(self.description)
return alertStr
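# A minimal handling sketch (the TLSConnection object and its
# handshakeClientCert() call are assumed from the wider tlslite API, not
# defined in this module):
#
#   try:
#       connection.handshakeClientCert()
#   except TLSRemoteAlert as alert:
#       print(str(alert))    # e.g. "handshake_failure"
#   except TLSLocalAlert as alert:
#       print(str(alert))    # alert description plus the optional message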
class TLSAuthenticationError(TLSError):
"""The handshake succeeded, but the other party's authentication
was inadequate.
This exception will only be raised when a
L{tlslite.Checker.Checker} has been passed to a handshake function.
The Checker will be invoked once the handshake completes, and if
the Checker objects to how the other party authenticated, a
subclass of this exception will be raised.
"""
pass
class TLSNoAuthenticationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain, but this did not occur."""
pass
class TLSAuthenticationTypeError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
different type of certificate chain."""
pass
class TLSFingerprintError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that matches a different fingerprint."""
pass
class TLSAuthorizationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that has a different authorization."""
pass
class TLSValidationError(TLSAuthenticationError):
"""The Checker has determined that the other party's certificate
chain is invalid."""
def __init__(self, msg, info=None):
# Include a dict containing info about this validation failure
TLSAuthenticationError.__init__(self, msg)
self.info = info
class TLSFaultError(TLSError):
"""The other party responded incorrectly to an induced fault.
This exception will only occur during fault testing, when a
TLSConnection's fault variable is set to induce some sort of
faulty behavior, and the other party doesn't respond appropriately.
"""
pass
class TLSUnsupportedError(TLSError):
"""The implementation doesn't support the requested (or required)
capabilities."""
pass
class TLSInternalError(TLSError):
"""The internal state of object is unexpected or invalid"""
pass
|
gpl-2.0
|
h2oai/h2o-dev
|
h2o-py/tests/testdir_parser/pyunit_NOFEATURE_orc_parser_prostate.py
|
6
|
1291
|
from builtins import str
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def orc_parser_timestamp_date():
"""
To verify that the orc parser is parsing correctly, we want to take a file we know (prostate_NA.csv), convert
it to an Orc file (prostate_NA.orc) and build two H2O frames out of them. We compare them and verify that
they are the same.
Nidhi did this manually in Hive and verified that the parsing is correct. I am automating the test here.
:return: None
"""
tol_time = 200 # comparing in ms or ns
tol_numeric = 1e-5 # tolerance for comparing other numeric fields
numElements2Compare = 10 # choose number of elements per column to compare. Save test time.
h2oOrc = h2o.import_file(path=pyunit_utils.locate('smalldata/parser/orc/prostate_NA.orc'))
h2oCsv = h2o.import_file(path=pyunit_utils.locate('smalldata/parser/csv2orc/prostate_NA.csv'))
# compare the two frames
assert pyunit_utils.compare_frames(h2oOrc, h2oCsv, numElements2Compare, tol_time, tol_numeric), \
"H2O frame parsed from orc and csv files are different!"
if __name__ == "__main__":
pyunit_utils.standalone_test(orc_parser_timestamp_date)
else:
orc_parser_timestamp_date()
|
apache-2.0
|
sridevikoushik31/nova
|
nova/api/openstack/compute/server_metadata.py
|
10
|
6813
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
class Controller(object):
"""The server metadata API controller for the OpenStack API."""
def __init__(self):
self.compute_api = compute.API()
super(Controller, self).__init__()
def _get_metadata(self, context, server_id):
try:
server = self.compute_api.get(context, server_id)
meta = self.compute_api.get_instance_metadata(context, server)
except exception.InstanceNotFound:
msg = _('Server does not exist')
raise exc.HTTPNotFound(explanation=msg)
meta_dict = {}
for key, value in meta.iteritems():
meta_dict[key] = value
return meta_dict
@wsgi.serializers(xml=common.MetadataTemplate)
def index(self, req, server_id):
"""Returns the list of metadata for a given instance."""
context = req.environ['nova.context']
return {'metadata': self._get_metadata(context, server_id)}
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def create(self, req, server_id, body):
try:
metadata = body['metadata']
except (KeyError, TypeError):
msg = _("Malformed request body")
raise exc.HTTPBadRequest(explanation=msg)
context = req.environ['nova.context']
new_metadata = self._update_instance_metadata(context,
server_id,
metadata,
delete=False)
return {'metadata': new_metadata}
@wsgi.serializers(xml=common.MetaItemTemplate)
@wsgi.deserializers(xml=common.MetaItemDeserializer)
def update(self, req, server_id, id, body):
try:
meta_item = body['meta']
except (TypeError, KeyError):
expl = _('Malformed request body')
raise exc.HTTPBadRequest(explanation=expl)
if id not in meta_item:
expl = _('Request body and URI mismatch')
raise exc.HTTPBadRequest(explanation=expl)
if len(meta_item) > 1:
expl = _('Request body contains too many items')
raise exc.HTTPBadRequest(explanation=expl)
context = req.environ['nova.context']
self._update_instance_metadata(context,
server_id,
meta_item,
delete=False)
return {'meta': meta_item}
@wsgi.serializers(xml=common.MetadataTemplate)
@wsgi.deserializers(xml=common.MetadataDeserializer)
def update_all(self, req, server_id, body):
try:
metadata = body['metadata']
except (TypeError, KeyError):
expl = _('Malformed request body')
raise exc.HTTPBadRequest(explanation=expl)
context = req.environ['nova.context']
new_metadata = self._update_instance_metadata(context,
server_id,
metadata,
delete=True)
return {'metadata': new_metadata}
def _update_instance_metadata(self, context, server_id, metadata,
delete=False):
try:
server = self.compute_api.get(context, server_id)
return self.compute_api.update_instance_metadata(context,
server,
metadata,
delete)
except exception.InstanceNotFound:
msg = _('Server does not exist')
raise exc.HTTPNotFound(explanation=msg)
except (ValueError, AttributeError):
msg = _("Malformed request body")
raise exc.HTTPBadRequest(explanation=msg)
except exception.InvalidMetadata as error:
raise exc.HTTPBadRequest(explanation=error.format_message())
except exception.InvalidMetadataSize as error:
raise exc.HTTPRequestEntityTooLarge(
explanation=error.format_message())
except exception.QuotaError as error:
raise exc.HTTPRequestEntityTooLarge(
explanation=error.format_message(),
headers={'Retry-After': 0})
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'update metadata')
@wsgi.serializers(xml=common.MetaItemTemplate)
def show(self, req, server_id, id):
"""Return a single metadata item."""
context = req.environ['nova.context']
data = self._get_metadata(context, server_id)
try:
return {'meta': {id: data[id]}}
except KeyError:
msg = _("Metadata item was not found")
raise exc.HTTPNotFound(explanation=msg)
@wsgi.response(204)
def delete(self, req, server_id, id):
"""Deletes an existing metadata."""
context = req.environ['nova.context']
metadata = self._get_metadata(context, server_id)
if id not in metadata:
msg = _("Metadata item was not found")
raise exc.HTTPNotFound(explanation=msg)
try:
server = self.compute_api.get(context, server_id)
self.compute_api.delete_instance_metadata(context, server, id)
except exception.InstanceNotFound:
msg = _('Server does not exist')
raise exc.HTTPNotFound(explanation=msg)
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'delete metadata')
def create_resource():
return wsgi.Resource(Controller())
|
apache-2.0
|
orekyuu/intellij-community
|
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py
|
115
|
19125
|
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation.
# All rights reserved.
"""Tokenization help for Python programs.
generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators.
Older entry points
tokenize_loop(readline, tokeneater)
tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
__author__ = 'Ka-Ping Yee <[email protected]>'
__credits__ = \
'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
import string, re
from codecs import BOM_UTF8, lookup
from lib2to3.pgen2.token import *
from . import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
"generate_tokens", "untokenize"]
del token
try:
bytes
except NameError:
# Support bytes type in Python <= 2.5, so 2to3 turns itself into
# valid Python 3 code.
bytes = str
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Binnumber = r'0[bB][01]*'
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[oO]?[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"//=?", r"->",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = map(
re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
"'''": single3prog, '"""': double3prog,
"r'''": single3prog, 'r"""': double3prog,
"u'''": single3prog, 'u"""': double3prog,
"b'''": single3prog, 'b"""': double3prog,
"ur'''": single3prog, 'ur"""': double3prog,
"br'''": single3prog, 'br"""': double3prog,
"R'''": single3prog, 'R"""': double3prog,
"U'''": single3prog, 'U"""': double3prog,
"B'''": single3prog, 'B"""': double3prog,
"uR'''": single3prog, 'uR"""': double3prog,
"Ur'''": single3prog, 'Ur"""': double3prog,
"UR'''": single3prog, 'UR"""': double3prog,
"bR'''": single3prog, 'bR"""': double3prog,
"Br'''": single3prog, 'Br"""': double3prog,
"BR'''": single3prog, 'BR"""': double3prog,
'r': None, 'R': None,
'u': None, 'U': None,
'b': None, 'B': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"u'''", 'u"""', "U'''", 'U"""',
"b'''", 'b"""', "B'''", 'B"""',
"ur'''", 'ur"""', "Ur'''", 'Ur"""',
"uR'''", 'uR"""', "UR'''", 'UR"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""',):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"u'", 'u"', "U'", 'U"',
"b'", 'b"', "B'", 'B"',
"ur'", 'ur"', "Ur'", 'Ur"',
"uR'", 'uR"', "UR'", 'UR"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"', ):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, start, end, line): # for testing
(srow, scol) = start
(erow, ecol) = end
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
The first parameter, readline, must be a callable object which provides
the same interface as the readline() method of built-in file objects.
Each call to the function should return one line of input as a string.
The second parameter, tokeneater, must also be a callable object. It is
called once for each token, with five arguments, corresponding to the
tuples generated by generate_tokens().
"""
try:
tokenize_loop(readline, tokeneater)
except StopTokenizing:
pass
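# A short usage sketch (the file name "example.py" is illustrative and is
# assumed to contain readable Python source):
#
#   with open("example.py") as f:
#       tokenize(f.readline)    # prints one line per token via printtoken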
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
for token_info in generate_tokens(readline):
tokeneater(*token_info)
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
for tok in iterable:
toknum, tokval = tok[:2]
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read
in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are present but
disagree, a SyntaxError will be raised. If the encoding cookie is an invalid
charset, a SyntaxError will be raised. Note that if a utf-8 bom is found,
'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be returned.
"""
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return bytes()
def find_cookie(line):
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
return None
match = cookie_re.match(line_string)
if not match:
return None
encoding = _get_normal_name(match.group(1))
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
raise SyntaxError("unknown encoding: " + encoding)
if bom_found:
if codec.name != 'utf-8':
# This behaviour mimics the Python interpreter
raise SyntaxError('encoding problem: utf-8')
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
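# An illustrative call (the file name is hypothetical; the cookie shown is a
# standard pep-0263 declaration):
#
#   with open("module_with_cookie.py", "rb") as f:
#       encoding, first_lines = detect_encoding(f.readline)
#   # a first line of "# -*- coding: latin-1 -*-" would yield "iso-8859-1"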
def untokenize(iterable):
"""Transform tokens back into Python source code.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
Round-trip invariant for limited input:
# Output text will tokenize back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
readline = iter(newcode.splitlines(1)).next
t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2
"""
ut = Untokenizer()
return ut.untokenize(iterable)
def generate_tokens(readline):
"""
The generate_tokens() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile).next # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
"""
lnum = parenlev = continued = 0
namechars, numchars = string.ascii_letters + '_', '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
while 1: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = ''
lnum = lnum + 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError, ("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield (STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield (ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ': column = column + 1
elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f': column = 0
else: break
pos = pos + 1
if pos == max: break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield (COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield (NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError, ("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = pseudoprog.match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
if initial in numchars or \
(initial == '.' and token != '.'): # ordinary number
yield (NUMBER, token, spos, epos, line)
elif initial in '\r\n':
newline = NEWLINE
if parenlev > 0:
newline = NL
yield (newline, token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield (COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield (STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = (endprogs[initial] or endprogs[token[1]] or
endprogs[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield (STRING, token, spos, epos, line)
elif initial in namechars: # ordinary name
yield (NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
# This yield is new; needed for better idempotency:
yield (NL, token, spos, (lnum, pos), line)
continued = 1
else:
if initial in '([{': parenlev = parenlev + 1
elif initial in ')]}': parenlev = parenlev - 1
yield (OP, token, spos, epos, line)
else:
yield (ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos = pos + 1
for indent in indents[1:]: # pop remaining indent levels
yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
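# A minimal driver sketch (StringIO and the source string are assumptions
# for illustration; this module targets Python 2):
#
#   from StringIO import StringIO
#   tokens = generate_tokens(StringIO("x = 1\n").readline)
#   for tok_type, tok_str, start, end, line in tokens:
#       print tok_name[tok_type], repr(tok_str)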
if __name__ == '__main__': # testing
import sys
if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
else: tokenize(sys.stdin.readline)
|
apache-2.0
|
addition-it-solutions/project-all
|
addons/stock_picking_wave/__init__.py
|
8
|
1038
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import stock_picking_wave
import wizard
import controllers
|
agpl-3.0
|
Imlunar/DJStew
|
launcher.py
|
28
|
17847
|
from __future__ import print_function
import os
import sys
import subprocess
try: # Older Pythons lack this
import urllib.request # We'll let them reach the Python
from importlib.util import find_spec # check anyway
except ImportError:
pass
import platform
import webbrowser
import hashlib
import argparse
import shutil
import stat
import time
try:
import pip
except ImportError:
pip = None
REQS_DIR = "lib"
sys.path.insert(0, REQS_DIR)
REQS_TXT = "requirements.txt"
REQS_NO_AUDIO_TXT = "requirements_no_audio.txt"
FFMPEG_BUILDS_URL = "https://ffmpeg.zeranoe.com/builds/"
INTRO = ("==========================\n"
"Red Discord Bot - Launcher\n"
"==========================\n")
IS_WINDOWS = os.name == "nt"
IS_MAC = sys.platform == "darwin"
IS_64BIT = platform.machine().endswith("64")
INTERACTIVE_MODE = len(sys.argv) <= 1 # CLI flags = non-interactive
PYTHON_OK = sys.version_info >= (3, 5)
FFMPEG_FILES = {
"ffmpeg.exe" : "e0d60f7c0d27ad9d7472ddf13e78dc89",
"ffplay.exe" : "d100abe8281cbcc3e6aebe550c675e09",
"ffprobe.exe" : "0e84b782c0346a98434ed476e937764f"
}
def parse_cli_arguments():
parser = argparse.ArgumentParser(description="Red - Discord Bot's launcher")
parser.add_argument("--start", "-s",
help="Starts Red",
action="store_true")
parser.add_argument("--auto-restart",
help="Autorestarts Red in case of issues",
action="store_true")
parser.add_argument("--update-red",
help="Updates Red (git)",
action="store_true")
parser.add_argument("--update-reqs",
help="Updates requirements (w/ audio)",
action="store_true")
parser.add_argument("--update-reqs-no-audio",
help="Updates requirements (w/o audio)",
action="store_true")
parser.add_argument("--repair",
help="Issues a git reset --hard",
action="store_true")
return parser.parse_args()
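# Example non-interactive invocations using the flags registered above:
#
#   python launcher.py --start
#   python launcher.py --start --auto-restart
#   python launcher.py --update-red --update-reqs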
def install_reqs(audio):
remove_reqs_readonly()
interpreter = sys.executable
if interpreter is None:
print("Python interpreter not found.")
return
txt = REQS_TXT if audio else REQS_NO_AUDIO_TXT
args = [
interpreter, "-m",
"pip", "install",
"--upgrade",
"--target", REQS_DIR,
"-r", txt
]
if IS_MAC: # --target is a problem on Homebrew. See PR #552
args.remove("--target")
args.remove(REQS_DIR)
code = subprocess.call(args)
if code == 0:
print("\nRequirements setup completed.")
else:
print("\nAn error occurred and the requirements setup might "
"not be completed. Consult the docs.\n")
def update_pip():
interpreter = sys.executable
if interpreter is None:
print("Python interpreter not found.")
return
args = [
interpreter, "-m",
"pip", "install",
"--upgrade", "pip"
]
code = subprocess.call(args)
if code == 0:
print("\nPip has been updated.")
else:
print("\nAn error occurred and pip might not have been updated.")
def update_red():
try:
code = subprocess.call(("git", "pull", "--ff-only"))
except FileNotFoundError:
print("\nError: Git not found. It's either not installed or not in "
"the PATH environment variable like requested in the guide.")
return
if code == 0:
print("\nRed has been updated")
else:
print("\nRed could not update properly. If this is caused by edits "
"you have made to the code you can try the repair option from "
"the Maintenance submenu")
def reset_red(reqs=False, data=False, cogs=False, git_reset=False):
if reqs:
try:
shutil.rmtree(REQS_DIR, onerror=remove_readonly)
print("Installed local packages have been wiped.")
except FileNotFoundError:
pass
except Exception as e:
print("An error occurred when trying to remove installed "
"requirements: {}".format(e))
if data:
try:
shutil.rmtree("data", onerror=remove_readonly)
print("'data' folder has been wiped.")
except FileNotFoundError:
pass
except Exception as e:
print("An error occurred when trying to remove the 'data' folder: "
"{}".format(e))
if cogs:
try:
shutil.rmtree("cogs", onerror=remove_readonly)
print("'cogs' folder has been wiped.")
except FileNotFoundError:
pass
except Exception as e:
print("An error occurred when trying to remove the 'cogs' folder: "
"{}".format(e))
if git_reset:
code = subprocess.call(("git", "reset", "--hard"))
if code == 0:
print("Red has been restored to the last local commit.")
else:
print("The repair has failed.")
def download_ffmpeg(bitness):
clear_screen()
repo = "https://github.com/Twentysix26/Red-DiscordBot/raw/master/"
verified = []
if bitness == "32bit":
print("Please download 'ffmpeg 32bit static' from the page that "
"is about to open.\nOnce done, open the 'bin' folder located "
"inside the zip.\nThere should be 3 files: ffmpeg.exe, "
"ffplay.exe, ffprobe.exe.\nPut all three of them into the "
"bot's main folder.")
time.sleep(4)
webbrowser.open(FFMPEG_BUILDS_URL)
return
for filename in FFMPEG_FILES:
if os.path.isfile(filename):
print("{} already present. Verifying integrity... "
"".format(filename), end="")
_hash = calculate_md5(filename)
if _hash == FFMPEG_FILES[filename]:
verified.append(filename)
print("Ok")
continue
else:
print("Hash mismatch. Redownloading.")
print("Downloading {}... Please wait.".format(filename))
with urllib.request.urlopen(repo + filename) as data:
with open(filename, "wb") as f:
f.write(data.read())
print("Download completed.")
for filename, _hash in FFMPEG_FILES.items():
if filename in verified:
continue
print("Verifying {}... ".format(filename), end="")
if calculate_md5(filename) == _hash:
print("Passed.")
else:
print("Hash mismatch. Please redownload.")
print("\nAll files have been downloaded.")
def verify_requirements():
sys.path_importer_cache = {} # I don't know if the cache reset has any
basic = find_spec("discord") # side effect. Without it, the lib folder
audio = find_spec("nacl") # wouldn't be seen if it didn't exist
if not basic: # when the launcher was started
return None
elif not audio:
return False
else:
return True
def is_git_installed():
try:
subprocess.call(["git", "--version"], stdout=subprocess.DEVNULL,
stdin =subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
except FileNotFoundError:
return False
else:
return True
def requirements_menu():
clear_screen()
while True:
print(INTRO)
print("Main requirements:\n")
print("1. Install basic + audio requirements (recommended)")
print("2. Install basic requirements")
if IS_WINDOWS:
print("\nffmpeg (required for audio):")
print("3. Install ffmpeg 32bit")
if IS_64BIT:
print("4. Install ffmpeg 64bit (recommended on Windows 64bit)")
print("\n0. Go back")
choice = user_choice()
if choice == "1":
install_reqs(audio=True)
wait()
elif choice == "2":
install_reqs(audio=False)
wait()
elif choice == "3" and IS_WINDOWS:
download_ffmpeg(bitness="32bit")
wait()
elif choice == "4" and (IS_WINDOWS and IS_64BIT):
download_ffmpeg(bitness="64bit")
wait()
elif choice == "0":
break
clear_screen()
def update_menu():
clear_screen()
while True:
print(INTRO)
reqs = verify_requirements()
if reqs is None:
status = "No requirements installed"
elif reqs is False:
status = "Basic requirements installed (no audio)"
else:
status = "Basic + audio requirements installed"
print("Status: " + status + "\n")
print("Update:\n")
print("Red:")
print("1. Update Red + requirements (recommended)")
print("2. Update Red")
print("3. Update requirements")
print("\nOthers:")
print("4. Update pip (might require admin privileges)")
print("\n0. Go back")
choice = user_choice()
if choice == "1":
update_red()
print("Updating requirements...")
reqs = verify_requirements()
if reqs is not None:
install_reqs(audio=reqs)
else:
print("The requirements haven't been installed yet.")
wait()
elif choice == "2":
update_red()
wait()
elif choice == "3":
reqs = verify_requirements()
if reqs is not None:
install_reqs(audio=reqs)
else:
print("The requirements haven't been installed yet.")
wait()
elif choice == "4":
update_pip()
wait()
elif choice == "0":
break
clear_screen()
def maintenance_menu():
clear_screen()
while True:
print(INTRO)
print("Maintenance:\n")
print("1. Repair Red (discards code changes, keeps data intact)")
print("2. Wipe 'data' folder (all settings, cogs' data...)")
print("3. Wipe 'lib' folder (all local requirements / local installed"
" python packages)")
print("4. Factory reset")
print("\n0. Go back")
choice = user_choice()
if choice == "1":
print("Any code modification you have made will be lost. Data/"
"non-default cogs will be left intact. Are you sure?")
if user_pick_yes_no():
reset_red(git_reset=True)
wait()
elif choice == "2":
print("Are you sure? This will wipe the 'data' folder, which "
"contains all your settings and cogs' data.\nThe 'cogs' "
"folder, however, will be left intact.")
if user_pick_yes_no():
reset_red(data=True)
wait()
elif choice == "3":
reset_red(reqs=True)
wait()
elif choice == "4":
print("Are you sure? This will wipe ALL your Red's installation "
"data.\nYou'll lose all your settings, cogs and any "
"modification you have made.\nThere is no going back.")
if user_pick_yes_no():
reset_red(reqs=True, data=True, cogs=True, git_reset=True)
wait()
elif choice == "0":
break
clear_screen()
def run_red(autorestart):
interpreter = sys.executable
if interpreter is None: # This should never happen
raise RuntimeError("Couldn't find Python's interpreter")
if verify_requirements() is None:
print("You don't have the requirements to start Red. "
"Install them from the launcher.")
if not INTERACTIVE_MODE:
exit(1)
cmd = (interpreter, "red.py")
while True:
try:
code = subprocess.call(cmd)
except KeyboardInterrupt:
code = 0
break
else:
if code == 0:
break
elif code == 26:
print("Restarting Red...")
continue
else:
if not autorestart:
break
print("Red has been terminated. Exit code: %d" % code)
if INTERACTIVE_MODE:
wait()
def clear_screen():
if IS_WINDOWS:
os.system("cls")
else:
os.system("clear")
def wait():
if INTERACTIVE_MODE:
input("Press enter to continue.")
def user_choice():
return input("> ").lower().strip()
def user_pick_yes_no():
choice = None
yes = ("yes", "y")
no = ("no", "n")
while choice not in yes and choice not in no:
choice = input("Yes/No > ").lower().strip()
return choice in yes
def remove_readonly(func, path, excinfo):
os.chmod(path, 0o755)
func(path)
def remove_reqs_readonly():
"""Workaround for issue #569"""
if not os.path.isdir(REQS_DIR):
return
os.chmod(REQS_DIR, 0o755)
for root, dirs, files in os.walk(REQS_DIR):
for d in dirs:
os.chmod(os.path.join(root, d), 0o755)
for f in files:
os.chmod(os.path.join(root, f), 0o755)
def calculate_md5(filename):
hash_md5 = hashlib.md5()
with open(filename, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
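# An illustrative integrity check against the FFMPEG_FILES table (the file
# is only present after a download on Windows):
#
#   if calculate_md5("ffmpeg.exe") == FFMPEG_FILES["ffmpeg.exe"]:
#       print("ffmpeg.exe verified")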
def create_fast_start_scripts():
"""Creates scripts for fast boot of Red without going
through the launcher"""
interpreter = sys.executable
if not interpreter:
return
call = "\"{}\" launcher.py".format(interpreter)
start_red = "{} --start".format(call)
start_red_autorestart = "{} --start --auto-restart".format(call)
modified = False
if IS_WINDOWS:
ccd = "pushd %~dp0\n"
pause = "\npause"
ext = ".bat"
else:
ccd = 'cd "$(dirname "$0")"\n'
pause = "\nread -rsp $'Press enter to continue...\\n'"
if not IS_MAC:
ext = ".sh"
else:
ext = ".command"
start_red = ccd + start_red + pause
start_red_autorestart = ccd + start_red_autorestart + pause
files = {
"start_red" + ext : start_red,
"start_red_autorestart" + ext : start_red_autorestart
}
if not IS_WINDOWS:
files["start_launcher" + ext] = ccd + call
for filename, content in files.items():
if not os.path.isfile(filename):
print("Creating {}... (fast start scripts)".format(filename))
modified = True
with open(filename, "w") as f:
f.write(content)
if not IS_WINDOWS and modified: # Let's make them executable on Unix
for script in files:
st = os.stat(script)
os.chmod(script, st.st_mode | stat.S_IEXEC)
def main():
print("Verifying git installation...")
has_git = is_git_installed()
is_git_installation = os.path.isdir(".git")
if IS_WINDOWS:
os.system("TITLE Red Discord Bot - Launcher")
clear_screen()
try:
create_fast_start_scripts()
except Exception as e:
print("Failed making fast start scripts: {}\n".format(e))
while True:
print(INTRO)
if not is_git_installation:
print("WARNING: It doesn't look like Red has been "
"installed with git.\nThis means that you won't "
"be able to update and some features won't be working.\n"
"A reinstallation is recommended. Follow the guide "
"properly this time:\n"
"https://twentysix26.github.io/Red-Docs/\n")
if not has_git:
print("WARNING: Git not found. This means that it's either not "
"installed or not in the PATH environment variable like "
"requested in the guide.\n")
print("1. Run Red /w autorestart in case of issues")
print("2. Run Red")
print("3. Update")
print("4. Install requirements")
print("5. Maintenance (repair, reset...)")
print("\n0. Quit")
choice = user_choice()
if choice == "1":
run_red(autorestart=True)
elif choice == "2":
run_red(autorestart=False)
elif choice == "3":
update_menu()
elif choice == "4":
requirements_menu()
elif choice == "5":
maintenance_menu()
elif choice == "0":
break
clear_screen()
args = parse_cli_arguments()
if __name__ == '__main__':
abspath = os.path.abspath(__file__)
dirname = os.path.dirname(abspath)
# Sets current directory to the script's
os.chdir(dirname)
if not PYTHON_OK:
print("Red needs Python 3.5 or superior. Install the required "
"version.\nPress enter to continue.")
if INTERACTIVE_MODE:
wait()
exit(1)
if pip is None:
print("Red cannot work without the pip module. Please make sure to "
"install Python without unchecking any option during the setup")
wait()
exit(1)
if args.repair:
reset_red(git_reset=True)
if args.update_red:
update_red()
if args.update_reqs:
install_reqs(audio=True)
elif args.update_reqs_no_audio:
install_reqs(audio=False)
if INTERACTIVE_MODE:
main()
elif args.start:
print("Starting Red...")
run_red(autorestart=args.auto_restart)
|
gpl-3.0
|
cswiercz/sympy
|
doc/src/conf.py
|
54
|
6371
|
# -*- coding: utf-8 -*-
#
# SymPy documentation build configuration file, created by
# sphinx-quickstart.py on Sat Mar 22 19:34:32 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys
import sympy
# If your extensions are in another directory, add it here.
sys.path = ['../sympy', 'ext'] + sys.path
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.addons.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.mathjax',
'numpydoc', 'sympylive', 'sphinx.ext.graphviz', ]
# Use this to use pngmath instead
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.pngmath', ]
# MathJax file, which is free to use. See http://www.mathjax.org/docs/2.0/start.html
mathjax_path = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML-full'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'SymPy'
copyright = '2015 SymPy Development Team'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = sympy.__version__
# The full version, including alpha/beta/rc tags.
release = version
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
html_logo = '_static/sympylogo.png'
html_favicon = '../_build/logo/sympy-notailtext-favicon.ico'
# See http://sphinx-doc.org/theming.html#builtin-themes.
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
html_domain_indices = ['py-modindex']
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'SymPydoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual], toctree_only).
# toctree_only is set to True so that the start file document itself is not included in the
# output, only the documents referenced by it via TOC trees. The extra stuff in the master
# document is intended to show up in the HTML, but doesn't really belong in the LaTeX output.
latex_documents = [('index', 'sympy-%s.tex' % release, 'SymPy Documentation',
'SymPy Development Team', 'manual', True)]
# Additional stuff for the LaTeX preamble.
# Tweaked to work with XeTeX.
latex_elements = {
'babel': '',
'fontenc': r'''
\usepackage{bm}
\usepackage{amssymb}
\usepackage{fontspec}
\usepackage[english]{babel}
\defaultfontfeatures{Mapping=tex-text}
\setmainfont{DejaVu Serif}
\setsansfont{DejaVu Sans}
\setmonofont{DejaVu Sans Mono}
''',
'fontpkg': '',
'inputenc': '',
'utf8extra': '',
'preamble': r'''
% redefine \LaTeX to be usable in math mode
\expandafter\def\expandafter\LaTeX\expandafter{\expandafter\text\expandafter{\LaTeX}}
'''
}
# SymPy logo on title page
html_logo = '_static/sympylogo.png'
latex_logo = '_static/sympylogo_big.png'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# Show page numbers next to internal references
latex_show_pagerefs = True
# We use False otherwise the module index gets generated twice.
latex_use_modindex = False
default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5', '-D 110']
# Note, this is ignored by the mathjax extension
# Any \newcommand should be defined in the file
pngmath_latex_preamble = '\\usepackage{amsmath}\n' \
'\\usepackage{bm}\n' \
'\\usepackage{amsfonts}\n' \
'\\usepackage{amssymb}\n' \
'\\setlength{\\parindent}{0pt}\n'
texinfo_documents = [
(master_doc, 'sympy', 'SymPy Documentation', 'SymPy Development Team',
'SymPy', 'Computer algebra system (CAS) in Python', 'Programming', 1),
]
# Use svg for graphviz
graphviz_output_format = 'svg'
|
bsd-3-clause
|
nagyistoce/edx-analytics-dashboard
|
analytics_dashboard/help/utils.py
|
4
|
1292
|
import ConfigParser
import logging
from django.conf import settings
log = logging.getLogger(__name__)
def _get_config_value_with_default(section_name, option, default_option="default"):
"""
Args:
section_name: name of the section in the configuration from which the option should be found
option: name of the configuration option
default_option: name of the default configuration option whose value should be returned if the
requested option is not found
"""
try:
return settings.DOCS_CONFIG.get(section_name, option)
except (ConfigParser.NoOptionError, AttributeError):
log.debug("Didn't find a configuration option for '%s' section and '%s' option", section_name, option)
return settings.DOCS_CONFIG.get(section_name, default_option)
def get_doc_url(page_token=None):
"""
Returns:
The URL for the documentation
"""
return "{url_base}/{language}/{version}/{page_path}".format(
url_base=settings.DOCS_CONFIG.get("help_settings", "url_base"),
language=_get_config_value_with_default("locales", settings.LANGUAGE_CODE),
version=settings.DOCS_CONFIG.get("help_settings", "version"),
page_path=_get_config_value_with_default("pages", page_token),
)
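# A hypothetical result, assuming DOCS_CONFIG maps url_base to
# "http://example.readthedocs.org/projects/insights", version to "latest",
# the active locale to "en" and the page token "docs" to "docs.html":
#
#   get_doc_url("docs")
#   # -> "http://example.readthedocs.org/projects/insights/en/latest/docs.html"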
|
agpl-3.0
|