repo_name (stringlengths 5-100) | ref (stringlengths 12-67) | path (stringlengths 4-244) | copies (stringlengths 1-8) | content (stringlengths 0-1.05M, nullable ⌀) |
---|---|---|---|---|
drglove/SickRage
|
refs/heads/master
|
lib/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober used by the Hebrew model probers.
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one it is is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber does not use any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
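# Worked example: a logical final-letter score of 9 against a visual score
# of 3 gives a distance of 6 >= MIN_FINAL_CHAR_DISTANCE, so the final-letter
# evidence alone decides. A model-confidence gap of 0.005 < MIN_MODEL_DISTANCE
# would likewise be treated as noise and ignored.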
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
# The letters Pe and Kaf rarely exhibit a related problem as Non-Final
# letters: words like 'Pop', 'Winamp' and 'Mubarak', for example,
# legitimately end with a Non-Final Pe or Kaf. However, the benefit of
# these letters as Non-Final evidence outweighs the damage, since such
# words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in is_non_final(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in get_charset_name().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly.
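# Example: the buffer b'\xe9\xed ' (Yod, Final Mem, space) is a two-letter
# word ending in a final letter - case (1) above, +1 to the logical score.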
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
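# Wiring sketch (a minimal example mirroring what the SBCSGroupProber does;
# the sibling module and class names below are assumptions based on this
# package's layout, shown for context only):
#
#   from .sbcharsetprober import SingleByteCharSetProber
#   from .langhebrewmodel import Win1255HebrewModel
#
#   hebrew_prober = HebrewProber()
#   logical = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
#   visual = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)
#   hebrew_prober.set_model_probers(logical, visual)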
|
FlaPer87/django-nonrel
|
refs/heads/master
|
django/contrib/localflavor/id/id_choices.py
|
65
|
from django.utils.translation import ugettext_lazy as _
# Reference: http://id.wikipedia.org/wiki/Daftar_provinsi_Indonesia
# Indonesia does not have an official Province code standard.
# I decided to use unambiguous and consistent (some are common) 3-letter codes.
PROVINCE_CHOICES = (
('BLI', _('Bali')),
('BTN', _('Banten')),
('BKL', _('Bengkulu')),
('DIY', _('Yogyakarta')),
('JKT', _('Jakarta')),
('GOR', _('Gorontalo')),
('JMB', _('Jambi')),
('JBR', _('Jawa Barat')),
('JTG', _('Jawa Tengah')),
('JTM', _('Jawa Timur')),
('KBR', _('Kalimantan Barat')),
('KSL', _('Kalimantan Selatan')),
('KTG', _('Kalimantan Tengah')),
('KTM', _('Kalimantan Timur')),
('BBL', _('Kepulauan Bangka-Belitung')),
('KRI', _('Kepulauan Riau')),
('LPG', _('Lampung')),
('MLK', _('Maluku')),
('MUT', _('Maluku Utara')),
('NAD', _('Nanggroe Aceh Darussalam')),
('NTB', _('Nusa Tenggara Barat')),
('NTT', _('Nusa Tenggara Timur')),
('PPA', _('Papua')),
('PPB', _('Papua Barat')),
('RIU', _('Riau')),
('SLB', _('Sulawesi Barat')),
('SLS', _('Sulawesi Selatan')),
('SLT', _('Sulawesi Tengah')),
('SLR', _('Sulawesi Tenggara')),
('SLU', _('Sulawesi Utara')),
('SMB', _('Sumatera Barat')),
('SMS', _('Sumatera Selatan')),
('SMU', _('Sumatera Utara')),
)
LICENSE_PLATE_PREFIX_CHOICES = (
('A', _('Banten')),
('AA', _('Magelang')),
('AB', _('Yogyakarta')),
('AD', _('Surakarta - Solo')),
('AE', _('Madiun')),
('AG', _('Kediri')),
('B', _('Jakarta')),
('BA', _('Sumatera Barat')),
('BB', _('Tapanuli')),
('BD', _('Bengkulu')),
('BE', _('Lampung')),
('BG', _('Sumatera Selatan')),
('BH', _('Jambi')),
('BK', _('Sumatera Utara')),
('BL', _('Nanggroe Aceh Darussalam')),
('BM', _('Riau')),
('BN', _('Kepulauan Bangka Belitung')),
('BP', _('Kepulauan Riau')),
('CC', _('Corps Consulate')),
('CD', _('Corps Diplomatic')),
('D', _('Bandung')),
('DA', _('Kalimantan Selatan')),
('DB', _('Sulawesi Utara Daratan')),
('DC', _('Sulawesi Barat')),
('DD', _('Sulawesi Selatan')),
('DE', _('Maluku')),
('DG', _('Maluku Utara')),
('DH', _('NTT - Timor')),
('DK', _('Bali')),
('DL', _('Sulawesi Utara Kepulauan')),
('DM', _('Gorontalo')),
('DN', _('Sulawesi Tengah')),
('DR', _('NTB - Lombok')),
('DS', _('Papua dan Papua Barat')),
('DT', _('Sulawesi Tenggara')),
('E', _('Cirebon')),
('EA', _('NTB - Sumbawa')),
('EB', _('NTT - Flores')),
('ED', _('NTT - Sumba')),
('F', _('Bogor')),
('G', _('Pekalongan')),
('H', _('Semarang')),
('K', _('Pati')),
('KB', _('Kalimantan Barat')),
('KH', _('Kalimantan Tengah')),
('KT', _('Kalimantan Timur')),
('L', _('Surabaya')),
('M', _('Madura')),
('N', _('Malang')),
('P', _('Jember')),
('R', _('Banyumas')),
('RI', _('Federal Government')),
('S', _('Bojonegoro')),
('T', _('Purwakarta')),
('W', _('Sidoarjo')),
('Z', _('Garut')),
)
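# A minimal usage sketch: both tuples are standard Django "choices" and can
# be passed straight to a ChoiceField or a model field's `choices` argument.
# The quick check below only counts entries and verifies code uniqueness.
if __name__ == '__main__':
    province_codes = [code for code, _name in PROVINCE_CHOICES]
    assert len(province_codes) == len(set(province_codes)), 'duplicate province code'
    print('%d provinces, %d plate prefixes' %
          (len(PROVINCE_CHOICES), len(LICENSE_PLATE_PREFIX_CHOICES)))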
|
JioCloud/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/networks/ports/tabs.py
|
21
|
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard import api
class OverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = "project/networks/ports/_detail_overview.html"
def get_context_data(self, request):
port_id = self.tab_group.kwargs['port_id']
try:
port = api.neutron.port_get(self.request, port_id)
except Exception:
redirect = reverse('horizon:admin:networks:index')
msg = _('Unable to retrieve port details.')
exceptions.handle(request, msg, redirect=redirect)
return {'port': port}
class PortDetailTabs(tabs.TabGroup):
slug = "port_details"
tabs = (OverviewTab,)
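# Usage sketch (hypothetical view, not part of this module): a TabView
# subclass renders this group for a given port, e.g.:
#
#   class DetailView(tabs.TabView):
#       tab_group_class = PortDetailTabs
#       template_name = 'project/networks/ports/detail.html'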
|
yavalvas/yav_com
|
refs/heads/master
|
build/matplotlib/examples/api/date_index_formatter.py
|
6
|
"""
When plotting time series, eg financial time series, one often wants
to leave out days on which there is no data, eh weekends. The example
below shows how to use an 'index formatter' to achieve the desired plot
"""
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib.cbook as cbook
import matplotlib.ticker as ticker
datafile = cbook.get_sample_data('aapl.csv', asfileobj=False)
print('loading %s' % datafile)
r = mlab.csv2rec(datafile)
r.sort()
r = r[-30:] # get the last 30 days
# first we'll do it the default way, with gaps on weekends
fig, ax = plt.subplots()
ax.plot(r.date, r.adj_close, 'o-')
fig.autofmt_xdate()
# next we'll write a custom formatter
N = len(r)
ind = np.arange(N) # the evenly spaced plot indices
def format_date(x, pos=None):
thisind = np.clip(int(x+0.5), 0, N-1)
return r.date[thisind].strftime('%Y-%m-%d')
fig, ax = plt.subplots()
ax.plot(ind, r.adj_close, 'o-')
ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_date))
fig.autofmt_xdate()
plt.show()
|
kinzhang/cocos2d-js-v3.2
|
refs/heads/master
|
frameworks/js-bindings/cocos2d-x/setup.py
|
2
|
#!/usr/bin/python
# coding=utf-8
"""****************************************************************************
Copyright (c) 2014 cocos2d-x.org
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************"""
'''
This script installs the environment variables needed by cocos2d-x. It will set these environment variables:
* COCOS_CONSOLE_ROOT: used to run the cocos console tools; for more information about the cocos console tools, please refer to
https://github.com/cocos2d/cocos2d-console
* NDK_ROOT: used to build Android native code
* ANDROID_SDK_ROOT: used to generate an application for Android through commands
* ANT_ROOT: used to generate an application for Android through commands
* COCOS_X_ROOT: path where cocos2d-x is installed
* COCOS_TEMPLATES_ROOT: path where cocos2d-x's templates are installed
On Mac OS X, when a shell starts, it reads these files in sequence:
~/.bash_profile
~/.bash_login
~/.profile
and executes commands from only the first one that exists. So we add the environment variables in the same order,
which means the script will:
* add the environment variables to ~/.bash_profile if it exists
* otherwise add the environment variables to ~/.bash_login if it exists
* otherwise add the environment variables to ~/.profile if it exists
If none of them exist, it will create ~/.bash_profile and add the environment variables to it.
'''
import os
import sys
import fileinput
import shutil
import subprocess
from optparse import OptionParser
COCOS_CONSOLE_ROOT = 'COCOS_CONSOLE_ROOT'
COCOS_X_ROOT = 'COCOS_X_ROOT'
COCOS_TEMPLATES_ROOT = 'COCOS_TEMPLATES_ROOT'
NDK_ROOT = 'NDK_ROOT'
ANDROID_SDK_ROOT = 'ANDROID_SDK_ROOT'
ANT_ROOT = 'ANT_ROOT'
def _check_python_version():
major_ver = sys.version_info[0]
if major_ver > 2:
print ("The python version is %d.%d. But python 2.x is required. (Version 2.7 is well tested)\n"
"Download it here: https://www.python.org/" % (major_ver, sys.version_info[1]))
return False
return True
class SetEnvVar(object):
RESULT_UPDATE_FAILED = -2
RESULT_ADD_FAILED = -1
RESULT_DO_NOTHING = 0
RESULT_UPDATED = 1
RESULT_ADDED = 2
MAC_CHECK_FILES = ['.bash_profile', '.bash_login', '.profile']
LINUX_CHECK_FILES = ['.bashrc']
ZSH_CHECK_FILES = ['.zshrc']
RE_FORMAT = r'^export[ \t]+%s=(.+)'
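# For example, RE_FORMAT % 'NDK_ROOT' yields r'^export[ \t]+NDK_ROOT=(.+)',
# which matches a line such as "export NDK_ROOT=/opt/android-ndk" (the path
# is illustrative) and captures the value.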
def __init__(self):
self.need_backup = True
self.backup_file = None
self.current_absolute_path = os.path.dirname(
os.path.realpath(__file__))
self.file_used_for_setup = ''
def _isWindows(self):
return sys.platform == 'win32'
def _isLinux(self):
return sys.platform.startswith('linux')
def _is_mac(self):
return sys.platform == 'darwin'
def _is_zsh(self):
shellItem = os.environ.get('SHELL')
if shellItem is not None:
if len(shellItem) >= 3:
return shellItem[-3:] == "zsh"
return False
def _get_unix_file_list(self):
file_list = None
if self._is_zsh():
file_list = SetEnvVar.ZSH_CHECK_FILES
elif self._isLinux():
file_list = SetEnvVar.LINUX_CHECK_FILES
elif self._is_mac():
file_list = SetEnvVar.MAC_CHECK_FILES
return file_list
def _get_filepath_for_setup(self):
file_list = self._get_unix_file_list()
file_to_write = None
if file_list is None:
return ''
home = os.path.expanduser('~')
for file_name in file_list:
file_path = os.path.join(home, file_name)
if os.path.exists(file_path):
file_to_write = file_path
break
if file_to_write is None:
self.need_backup = False
file_to_write = os.path.join(home, file_list[0])
file_obj = open(file_to_write, 'w')
file_obj.close()
return file_to_write
# Modify the Windows registry to add an environment variable
def _set_environment_variable_win32(self, key, value):
ret = False
import _winreg
try:
env = None
env = _winreg.OpenKeyEx(_winreg.HKEY_CURRENT_USER,
'Environment',
0,
_winreg.KEY_SET_VALUE | _winreg.KEY_READ)
_winreg.SetValueEx(env, key, 0, _winreg.REG_SZ, value)
_winreg.FlushKey(env)
_winreg.CloseKey(env)
ret = True
except Exception:
if env:
_winreg.CloseKey(env)
ret = False
return ret
def _gen_backup_file(self):
file_name = os.path.basename(self.file_used_for_setup)
file_path = os.path.dirname(self.file_used_for_setup)
backup_file_name = file_name + ".backup"
path = os.path.join(file_path, backup_file_name)
i = 1
while os.path.exists(path):
backup_file_name = file_name + ".backup%d" % i
path = os.path.join(file_path, backup_file_name)
i += 1
return path
def _set_environment_variable_unix(self, key, value):
if self.need_backup:
# backup the environment file
self.backup_file = self._gen_backup_file()
shutil.copy(self.file_used_for_setup, self.backup_file)
self.need_backup = False
file = open(self.file_used_for_setup, 'a')
file.write('\n# Add environment variable %s for cocos2d-x\n' % key)
file.write('export %s=%s\n' % (key, value))
file.write('export PATH=$%s:$PATH\n' % key)
if key == ANDROID_SDK_ROOT:
file.write(
'export PATH=$%s/tools:$%s/platform-tools:$PATH\n' % (key, key))
file.close()
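# The appended block ends up in e.g. ~/.bash_profile looking like this
# (values are illustrative):
#   # Add environment variable NDK_ROOT for cocos2d-x
#   export NDK_ROOT=/opt/android-ndk
#   export PATH=$NDK_ROOT:$PATH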
return True
def _set_environment_variable(self, key, value):
print(" -> Add %s environment variable..." % key)
ret = False
if self._isWindows():
ret = self._set_environment_variable_win32(key, value)
else:
ret = self._set_environment_variable_unix(key, value)
if ret:
print(" ->Added %s=%s\n" % (key, value))
else:
print(" ->Add failed\n")
return ret
def _search_unix_variable(self, var_name, file_name):
if not os.path.isfile(file_name):
return None
import re
str_re = SetEnvVar.RE_FORMAT % var_name
patten = re.compile(str_re)
ret = None
for line in open(file_name):
str1 = line.lstrip(' \t')
match = patten.match(str1)
if match is not None:
ret = match.group(1)
return ret
def _find_environment_variable(self, var):
print(" ->Search for environment variable %s..." % var)
ret = None
try:
ret = os.environ[var]
except Exception:
if not self._isWindows():
file_list = self._get_unix_file_list()
if file_list is not None:
home = os.path.expanduser('~')
for name in file_list:
path = os.path.join(home, name)
ret = self._search_unix_variable(var, path)
if ret is not None:
break
else:
import _winreg
try:
env = None
env = _winreg.OpenKeyEx(_winreg.HKEY_CURRENT_USER,
'Environment',
0,
_winreg.KEY_READ)
ret = _winreg.QueryValueEx(env, var)[0]
_winreg.CloseKey(env)
except Exception:
if env:
_winreg.CloseKey(env)
ret = None
if ret is None:
print(" ->%s not found\n" % var)
else:
print(" ->%s is found : %s\n" % (var, ret))
return ret
def _get_input_value(self, var_name):
ret = raw_input(
' ->Please enter the path of %s (or press Enter to skip):' % var_name)
ret.rstrip(" \t")
return ret
# Python on Linux doesn't include the Tkinter module, so let the user input the path in the terminal
# if self._isLinux():
# input_value = raw_input('Couldn\'t find the "%s" envrironment variable. Please enter it: ' % sys_var)
# else:
# pop up a window to let user select path for ndk root
# import Tkinter
# import tkFileDialog
# self.tmp_input_value = None
# root = Tkinter.Tk()
# if sys_var == NDK_ROOT:
# root.wm_title('Set NDK_ROOT')
# else:
# root.wm_title('Set ANDROID_SDK_ROOT')
# def callback():
# self.tmp_input_value = tkFileDialog.askdirectory()
# root.destroy()
# if sys_var == NDK_ROOT:
# label_content = 'Select path for Android NDK:'
# label_help = """
# The Android NDK is needed to develop games for Android.
# For further information, go to:
# http://developer.android.com/tools/sdk/ndk/index.html.
# You can safely skip this step now. You can set the NDK_ROOT later.
# """
# if sys_var == ANDROID_SDK_ROOT:
# label_content = 'Select path for Android SDK'
# label_help = """
# The Android SDK is needed to develop games for Android.
# For further information, go to:
# https://developer.android.com/tools/sdk/ndk/index.html.
# You can safely skip this step now. You can set the ANDROID_SDK_ROOT later.
# """
# Tkinter.Label(root, text=label_help).pack()
# Tkinter.Button(root, text=label_content, command=callback).pack()
# self._center(root)
# root.mainloop()
# input_value = self.tmp_input_value
# self.tmp_input_value = None
# return input_value
# display a window in center and put it on top
# def _center(self, win):
# win.update_idletasks()
# width = win.winfo_width()
# height = win.winfo_height()
# x = (win.winfo_screenwidth() / 2) - (width / 2)
# y = (win.winfo_screenheight() / 2) - (height / 2)
# win.geometry('{}x{}+{}+{}'.format(width, height, x, y))
# win.wm_attributes('-topmost', 1)
def _check_valid(self, var_name, value):
ret = False
if var_name == NDK_ROOT:
ret = self._is_ndk_root_valid(value)
elif var_name == ANDROID_SDK_ROOT:
ret = self._is_android_sdk_root_valid(value)
elif var_name == ANT_ROOT:
ret = self._is_ant_root_valid(value)
else:
ret = False
if not ret:
print(
' ->Error: "%s" is not a valid path of %s. Ignoring it.' % (value, var_name))
return ret
def _is_ndk_root_valid(self, ndk_root):
if not ndk_root:
return False
ndk_build_path = os.path.join(ndk_root, 'ndk-build')
if os.path.isfile(ndk_build_path):
return True
else:
return False
def _is_android_sdk_root_valid(self, android_sdk_root):
if not android_sdk_root:
return False
if self._isWindows():
android_path = os.path.join(
android_sdk_root, 'tools', 'android.bat')
else:
android_path = os.path.join(android_sdk_root, 'tools', 'android')
if os.path.isfile(android_path):
return True
else:
return False
def _is_ant_root_valid(self, ant_root):
ant_path = ''
if self._isWindows():
ant_path = os.path.join(ant_root, 'ant.bat')
else:
ant_path = os.path.join(ant_root, 'ant')
if os.path.isfile(ant_path):
return True
else:
return False
def remove_dir_from_win_path(self, remove_dir):
import _winreg
try:
env = None
path = None
env = _winreg.OpenKeyEx(_winreg.HKEY_CURRENT_USER,
'Environment',
0,
_winreg.KEY_SET_VALUE | _winreg.KEY_READ)
path = _winreg.QueryValueEx(env, 'Path')[0]
path_lower = path.lower()
remove_dir = remove_dir.replace('/', '\\')
remove_dir_lower = remove_dir.lower()
start_pos = path_lower.find(remove_dir_lower)
if (start_pos >= 0):
length = len(remove_dir_lower)
need_remove = path[start_pos:(start_pos + length)]
path = path.replace(need_remove, '')
path = path.replace(';;', ';')
_winreg.SetValueEx(env, 'Path', 0, _winreg.REG_SZ, path)
_winreg.FlushKey(env)
_winreg.CloseKey(env)
print(' ->Remove directory \"%s\" from PATH!\n' % remove_dir)
except Exception:
print(' ->Remove directory \"%s\" from PATH failed!\n' %
remove_dir)
def set_windows_path(self, add_dir):
ret = False
import _winreg
try:
env = None
path = None
env = _winreg.OpenKeyEx(_winreg.HKEY_CURRENT_USER,
'Environment',
0,
_winreg.KEY_SET_VALUE | _winreg.KEY_READ)
path = _winreg.QueryValueEx(env, 'Path')[0]
# add the directory only if it can't be found in PATH
path_lower = path.lower()
add_dir_lower = add_dir.lower()
if (path_lower.find(add_dir_lower) == -1):
path = add_dir + ';' + path
_winreg.SetValueEx(env, 'Path', 0, _winreg.REG_SZ, path)
_winreg.FlushKey(env)
_winreg.CloseKey(env)
ret = True
except Exception:
if not path:
path = add_dir
_winreg.SetValueEx(env, 'Path', 0, _winreg.REG_SZ, path)
_winreg.FlushKey(env)
ret = True
else:
_winreg.SetValueEx(env, 'Path', 0, _winreg.REG_SZ, path)
_winreg.FlushKey(env)
ret = False
if env:
_winreg.CloseKey(env)
if ret:
print(" ->Add directory \"%s\" into PATH succeed!\n" % add_dir)
else:
print(" ->Add directory \"%s\" into PATH failed!\n" % add_dir)
def set_console_root(self):
print("->Check environment variable %s" % COCOS_CONSOLE_ROOT)
cocos_consle_root = os.path.join(
self.current_absolute_path, 'tools', 'cocos2d-console', 'bin')
old_dir = self._find_environment_variable(COCOS_CONSOLE_ROOT)
if old_dir is None:
# add environment variable
if self._isWindows():
self.set_windows_path(cocos_consle_root)
self._set_environment_variable(
COCOS_CONSOLE_ROOT, cocos_consle_root)
else:
if old_dir == cocos_consle_root:
# is same with before, nothing to do
return
# update the environment variable
if self._isWindows():
self.remove_dir_from_win_path(old_dir)
self.set_windows_path(cocos_consle_root)
self._force_update_env(COCOS_CONSOLE_ROOT, cocos_consle_root)
def set_cocos_x_root(self):
print("->Check environment variable %s" % COCOS_X_ROOT)
cocos_x_root = self.current_absolute_path
old_dir = self._find_environment_variable(COCOS_X_ROOT)
if old_dir is None:
# add environment variable
if self._isWindows():
self.set_windows_path(cocos_x_root)
self._set_environment_variable(COCOS_X_ROOT, cocos_x_root)
else:
if old_dir == cocos_x_root:
# is same with before, nothing to do
return
# update the environment variable
if self._isWindows():
self.remove_dir_from_win_path(old_dir)
self.set_windows_path(cocos_x_root)
self._force_update_env(COCOS_X_ROOT, cocos_x_root)
def set_templates_root(self):
print("->Check environment variable %s" % COCOS_TEMPLATES_ROOT)
cocos_templates_root = os.path.join(self.current_absolute_path, 'templates')
old_dir = self._find_environment_variable(COCOS_TEMPLATES_ROOT)
if old_dir is None:
# add environment variable
if self._isWindows():
self.set_windows_path(cocos_templates_root)
self._set_environment_variable(COCOS_TEMPLATES_ROOT, cocos_templates_root)
else:
if old_dir == cocos_templates_root:
# is same with before, nothing to do
return
# update the environment variable
if self._isWindows():
self.remove_dir_from_win_path(old_dir)
self.set_windows_path(cocos_templates_root)
self._force_update_env(COCOS_TEMPLATES_ROOT, cocos_templates_root)
def _force_update_unix_env(self, var_name, value):
import re
home = os.path.expanduser('~')
str_re = SetEnvVar.RE_FORMAT % var_name
patten = re.compile(str_re)
replace_str = 'export %s=%s\n' % (var_name, value)
file_list = SetEnvVar.MAC_CHECK_FILES
if self._isLinux():
file_list = SetEnvVar.LINUX_CHECK_FILES
print(" ->Update variable %s in files %s" %
(var_name, str(file_list)))
variable_updated = False
for file_name in file_list:
path = os.path.join(home, file_name)
if os.path.isfile(path):
lines = []
# read files
need_over_write = False
file_obj = open(path, 'r')
for line in file_obj:
str_temp = line.lstrip(' \t')
match = patten.match(str_temp)
if match is not None:
variable_updated = True
need_over_write = True
lines.append(replace_str)
else:
lines.append(line)
file_obj.close()
# rewrite file
if need_over_write:
file_obj = open(path, 'w')
file_obj.writelines(lines)
file_obj.close()
print(" ->File %s updated!" % path)
# nothing updated, should add variable
if not variable_updated:
print("\n ->No files updated, add variable %s instead!" %
var_name)
ret = self._set_environment_variable(var_name, value)
else:
ret = True
return ret
def _force_update_env(self, var_name, value):
ret = False
if self._isWindows():
print(" ->Force update environment variable %s" % var_name)
ret = self._set_environment_variable_win32(var_name, value)
if not ret:
print(" ->Failed!")
else:
print(" ->Succeed : %s=%s" % (var_name, value))
else:
ret = self._force_update_unix_env(var_name, value)
return ret
def _get_ant_path(self):
return self._get_sdkpath_for_cmd("ant", False)
def _get_androidsdk_path(self):
return self._get_sdkpath_for_cmd("android")
def _get_ndkbuild_path(self):
return self._get_sdkpath_for_cmd("ndk-build", False)
def _get_sdkpath_for_cmd(self, cmd, has_bin_folder=True):
ret = None
print(" ->Search for command " + cmd + " in system...")
if not self._isWindows():
import commands
state, result = commands.getstatusoutput("which " + cmd)
if state == 0:
ret = os.path.realpath(result)
ret = os.path.dirname(ret)
# Use parent folder if has_bin_folder was set
if has_bin_folder:
ret = os.path.dirname(ret)
if ret is not None:
print(" ->Path " + ret + " was found\n")
else:
print(" ->Command " + cmd + " not found\n")
return ret
def _find_value_from_sys(self, var_name):
if var_name == ANT_ROOT:
return self._get_ant_path()
elif var_name == NDK_ROOT:
return self._get_ndkbuild_path()
elif var_name == ANDROID_SDK_ROOT:
return self._get_androidsdk_path()
else:
return None
def set_variable(self, var_name, value):
print("->Check environment variable %s" % var_name)
find_value = self._find_environment_variable(var_name)
var_found = (find_value is not None)
action_none = 0
action_add = 1
action_update = 2
need_action = action_none
if var_found:
if value and self._check_valid(var_name, value):
# should update
need_action = action_update
else:
# do nothing
need_action = action_none
else:
if not value:
# find the command path in system
value = self._find_value_from_sys(var_name)
if not value:
value = self._get_input_value(var_name)
if value and self._check_valid(var_name, value):
# should add variable
need_action = action_add
else:
# do nothing
need_action = action_none
if need_action == action_none:
# do nothing
return SetEnvVar.RESULT_DO_NOTHING
elif need_action == action_add:
# add variable
if self._set_environment_variable(var_name, value):
return SetEnvVar.RESULT_ADDED
else:
return SetEnvVar.RESULT_ADD_FAILED
elif need_action == action_update:
# update variable
if self._force_update_env(var_name, value):
# update succeed
return SetEnvVar.RESULT_UPDATED
else:
# update failed
return SetEnvVar.RESULT_UPDATE_FAILED
else:
return SetEnvVar.RESULT_DO_NOTHING
def set_environment_variables(self, ndk_root, android_sdk_root, ant_root):
print('\nSetting up cocos2d-x...')
self.file_used_for_setup = self._get_filepath_for_setup()
self.set_console_root()
self.set_cocos_x_root()
self.set_templates_root()
if self._isWindows():
print(
'->Configuration for Android platform only, you can also skip and manually edit your environment variables\n')
else:
print('->Configuration for Android platform only, you can also skip and manually edit "%s"\n' %
self.file_used_for_setup)
ndk_ret = self.set_variable(NDK_ROOT, ndk_root)
sdk_ret = self.set_variable(ANDROID_SDK_ROOT, android_sdk_root)
ant_ret = self.set_variable(ANT_ROOT, ant_root)
# tell the user about the backup file
if (self.backup_file is not None) and (os.path.exists(self.backup_file)):
print('\nA backup file \"%s\" is created for \"%s\".' %
(self.backup_file, self.file_used_for_setup))
if self._isWindows():
print(
'\nPlease restart the terminal or restart the computer to make the added system variables take effect\n')
else:
print('\nPlease execute the command "source %s" to make the added system variables take effect\n' %
self.file_used_for_setup)
if __name__ == '__main__':
if not _check_python_version():
exit()
parser = OptionParser()
parser.add_option(
'-n', '--ndkroot', dest='ndk_root', help='directory of ndk root')
parser.add_option('-a', '--androidsdkroot',
dest='android_sdk_root', help='directory of android sdk root')
parser.add_option(
'-t', '--antroot', dest='ant_root', help='directory that contains ant/ant.bat')
opts, args = parser.parse_args()
# set environment variables
env = SetEnvVar()
env.set_environment_variables(
opts.ndk_root, opts.android_sdk_root, opts.ant_root)
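# On Windows, broadcast WM_SETTINGCHANGE so running programs (e.g. Explorer
# and newly started consoles) reload the environment without a logoff.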
if env._isWindows():
import ctypes
HWND_BROADCAST = 0xFFFF
WM_SETTINGCHANGE = 0x1A
SMTO_ABORTIFHUNG = 0x0002
result = ctypes.c_long()
SendMessageTimeoutW = ctypes.windll.user32.SendMessageTimeoutW
SendMessageTimeoutW(HWND_BROADCAST, WM_SETTINGCHANGE, 0,
u'Environment', SMTO_ABORTIFHUNG, 5000, ctypes.byref(result))
|
ndtran/compassion-modules
|
refs/heads/master
|
contract_compassion/tests/__init__.py
|
2
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_contract_compassion
|
poojavade/Genomics_Docker
|
refs/heads/master
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Pillow-2.3.0-py2.7-linux-x86_64.egg/PIL/WebPImagePlugin.py
|
7
|
from PIL import Image
from PIL import ImageFile
from io import BytesIO
from PIL import _webp
_VALID_WEBP_MODES = {
"RGB": True,
"RGBA": True,
}
_VP8_MODES_BY_IDENTIFIER = {
b"VP8 ": "RGB",
b"VP8X": "RGBA",
b"VP8L": "RGBA", # lossless
}
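# Container layout checked by _accept() below, per the RIFF/WebP format:
# bytes 0-3 are b"RIFF", bytes 4-7 a little-endian file size, bytes 8-11
# b"WEBP", and bytes 12-15 the FourCC of the first chunk (one of the
# identifiers above).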
def _accept(prefix):
is_riff_file_format = prefix[:4] == b"RIFF"
is_webp_file = prefix[8:12] == b"WEBP"
is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER
return is_riff_file_format and is_webp_file and is_valid_vp8_mode
class WebPImageFile(ImageFile.ImageFile):
format = "WEBP"
format_description = "WebP image"
def _open(self):
data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode(self.fp.read())
self.info["icc_profile"] = icc_profile
self.info["exif"] = exif
self.size = width, height
self.fp = BytesIO(data)
self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
def _getexif(self):
from PIL.JpegImagePlugin import _getexif
return _getexif(self)
def _save(im, fp, filename):
image_mode = im.mode
if im.mode not in _VALID_WEBP_MODES:
raise IOError("cannot write mode %s as WEBP" % image_mode)
lossless = im.encoderinfo.get("lossless", False)
quality = im.encoderinfo.get("quality", 80)
icc_profile = im.encoderinfo.get("icc_profile", "")
exif = im.encoderinfo.get("exif", "")
data = _webp.WebPEncode(
im.tobytes(),
im.size[0],
im.size[1],
lossless,
float(quality),
im.mode,
icc_profile,
exif
)
if data is None:
raise IOError("cannot write file as WEBP (encoder returned None)")
fp.write(data)
Image.register_open("WEBP", WebPImageFile, _accept)
Image.register_save("WEBP", _save)
Image.register_extension("WEBP", ".webp")
Image.register_mime("WEBP", "image/webp")
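# Round-trip sketch (file names are hypothetical): the registrations above
# let Image.open()/save() dispatch on the WEBP format automatically.
if __name__ == "__main__":
    im = Image.open("input.webp")       # decoded through WebPImageFile._open
    im.save("output.webp", quality=80)  # encoded through _save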
|
gauravbose/digital-menu
|
refs/heads/master
|
tests/schema/tests.py
|
6
|
import datetime
import itertools
import unittest
from copy import copy
from django.db import (
DatabaseError, IntegrityError, OperationalError, connection,
)
from django.db.models import Model
from django.db.models.fields import (
BigIntegerField, BinaryField, BooleanField, CharField, DateTimeField,
IntegerField, PositiveIntegerField, SlugField, TextField,
)
from django.db.models.fields.related import (
ForeignKey, ManyToManyField, OneToOneField,
)
from django.db.transaction import atomic
from django.test import TransactionTestCase, skipIfDBFeature
from .fields import CustomManyToManyField, InheritedManyToManyField
from .models import (
Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak,
BookWithLongName, BookWithO2O, BookWithSlug, Note, Tag, TagIndexed,
TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests that the schema-alteration code works correctly.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book,
BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, Note, Tag,
TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test dependent model classes that will be
# automatically removed from the app cache on test tear down.
self.local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if 'schema' in new_apps.all_models:
for model in self.local_models:
del new_apps.all_models['schema'][model._meta.model_name]
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
with connection.cursor() as cursor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names(cursor)
for model in itertools.chain(SchemaTests.models, self.local_models):
# Remove any M2M tables first
for field in model._meta.local_many_to_many:
with atomic():
tbl = field.rel.through._meta.db_table
if tbl in table_names:
cursor.execute(connection.schema_editor().sql_delete_table % {
"table": connection.ops.quote_name(tbl),
})
table_names.remove(tbl)
# Then remove the main tables
with atomic():
tbl = model._meta.db_table
if tbl in table_names:
cursor.execute(connection.schema_editor().sql_delete_table % {
"table": connection.ops.quote_name(tbl),
})
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
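# columns maps column name to (field type, description row), e.g.
# {'name': ('CharField', <row>)}; index 6 of the description row is the
# null flag asserted on throughout these tests.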
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
# SQLite also doesn't error properly
if not columns:
raise DatabaseError("Table does not exist (empty pragma)")
return columns
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_indexes(cursor, table)
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check that it's there
list(Author.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Author)
# Check that it's gone
self.assertRaises(
DatabaseError,
lambda: list(Author.objects.all()),
)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_fk(self):
"Tests that creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Check that initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Make sure the new FK constraint is present
constraints = self.get_constraints(Book._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["author_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_tag', 'id'))
break
else:
self.fail("No FK constraint for author_id found")
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_fk_db_constraint(self):
"Tests that the db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Check that initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
# Check that BookWeak doesn't have an FK constraint
constraints = self.get_constraints(BookWeak._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["author_id"] and details['foreign_key']:
self.fail("FK constraint for author_id found")
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Make sure no FK constraint is present
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["tag_id"] and details['foreign_key']:
self.fail("FK constraint for tag_id found")
# Alter to one with a constraint
new_field2 = ForeignKey(Tag)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
# Make sure the new FK constraint is present
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["tag_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_tag', 'id'))
break
else:
self.fail("No FK constraint for tag_id found")
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
# Make sure no FK constraint is present
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["tag_id"] and details['foreign_key']:
self.fail("FK constraint for tag_id found")
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Check that initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Make sure no FK constraint is present
constraints = self.get_constraints(new_field.rel.through._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["tag_id"] and details['foreign_key']:
self.fail("FK constraint for tag_id found")
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['age'][0], "IntegerField")
self.assertEqual(columns['age'][1][6], True)
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['surname'][0], "CharField")
self.assertEqual(columns['surname'][1][6],
connection.features.interprets_empty_strings_as_nulls)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# BooleanFields are stored as TINYINT(1) on MySQL.
field_type = columns['awesome'][0]
self.assertEqual(field_type, connection.features.introspected_boolean_field_type(new_field, created_separately=True))
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add the field with a default it needs to cast (to string in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns['thing']
self.assertEqual(field_type, 'IntegerField')
# Make sure the values were transformed correctly
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# MySQL annoyingly uses the same backend, so it'll come back as one of
# these two types.
self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(columns['name'][1][6], True)
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
def test_alter_text_field(self):
# Regression for "BLOB/TEXT column 'info' can't have a default value")
# on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='aaa')
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='bbb')
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns['height'][1][6])
# Create some test data
Author.objects.create(name='Not null author', height=12)
Author.objects.create(name='Null author')
# Verify null value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertIsNone(Author.objects.get(name='Null author').height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertFalse(columns['height'][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertEqual(Author.objects.get(name='Null author').height, 42)
def test_alter_charfield_to_null(self):
"""
        #24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field('name')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field)
def test_alter_textfield_to_null(self):
"""
        #24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field('info')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field)
@unittest.skipUnless(connection.features.supports_combined_alters, "No combined ALTER support")
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns['height'][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns['height'][1][6])
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Make sure the FK constraint is present
constraints = self.get_constraints(Book._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["author_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_author', 'id'))
break
else:
self.fail("No FK constraint for author_id found")
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Make sure the FK constraint is present
constraints = self.get_constraints(Book._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["author_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_author', 'id'))
break
else:
self.fail("No FK constraint for author_id found")
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for name, details in constraints.items():
if details['foreign_key']:
self.fail('Found an unexpected FK constraint to %s' % details['columns'])
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
constraints = self.get_constraints(LocalBook._meta.db_table)
# Ensure FK constraint exists
for name, details in constraints.items():
if details['foreign_key'] and details['columns'] == ["author_id"]:
self.assertEqual(details['foreign_key'], ('schema_author', 'id'))
break
else:
self.fail("No FK constraint for author_id found")
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
BookWithO2O.objects.all().delete()
# Make sure the FK constraint is present
constraints = self.get_constraints(BookWithO2O._meta.db_table)
author_is_fk = False
for name, details in constraints.items():
if details['columns'] == ['author_id']:
if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'):
author_is_fk = True
self.assertTrue(author_is_fk, "No FK constraint for author_id found")
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique anymore
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
# Make sure the FK constraint is still present
constraints = self.get_constraints(Book._meta.db_table)
author_is_fk = False
for name, details in constraints.items():
if details['columns'] == ['author_id']:
if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'):
author_is_fk = True
self.assertTrue(author_is_fk, "No FK constraint for author_id found")
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
Book.objects.all().delete()
# Make sure the FK constraint is present
constraints = self.get_constraints(Book._meta.db_table)
author_is_fk = False
for name, details in constraints.items():
if details['columns'] == ['author_id']:
if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'):
author_is_fk = True
self.assertTrue(author_is_fk, "No FK constraint for author_id found")
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique now
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
# Make sure the FK constraint is present
constraints = self.get_constraints(BookWithO2O._meta.db_table)
author_is_fk = False
for name, details in constraints.items():
if details['columns'] == ['author_id']:
if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'):
author_is_fk = True
self.assertTrue(author_is_fk, "No FK constraint for author_id found")
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
        # This will fail if DROP DEFAULT is inadvertently executed on this
        # field, which would drop the id sequence, at least on PostgreSQL.
Author.objects.create(name='Foo')
def test_rename(self):
"""
        Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['display_name'][0], "CharField")
self.assertNotIn("name", columns)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [
LocalBookWithM2M,
LocalBookWithM2M._meta.get_field('tags').rel.through,
]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").rel.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough")
tag = ForeignKey("schema.TagM2MTest")
class Meta:
app_label = 'schema'
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(columns['book_id'][0], "IntegerField")
self.assertEqual(columns['tag_id'][0], "IntegerField")
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
self.local_models += [new_field.rel.through]
# Ensure there's no m2m table there
self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.rel.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough")
tag = ForeignKey("schema.TagM2MTest")
class Meta:
app_label = 'schema'
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [
LocalBookWithM2M,
LocalBookWithM2M._meta.get_field('tags').rel.through,
]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
constraints = self.get_constraints(LocalBookWithM2M._meta.get_field("tags").rel.through._meta.db_table)
if connection.features.supports_foreign_keys:
for name, details in constraints.items():
if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_tagm2mtest', 'id'))
break
else:
self.fail("No FK constraint for tagm2mtest_id found")
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
self.local_models += [new_field.rel.through]
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field)
# Ensure old M2M is gone
self.assertRaises(DatabaseError, self.column_classes, LocalBookWithM2M._meta.get_field("tags").rel.through)
# Ensure the new M2M exists and points to UniqueTest
constraints = self.get_constraints(new_field.rel.through._meta.db_table)
if connection.features.supports_foreign_keys:
for name, details in constraints.items():
if details['columns'] == ["uniquetest_id"] and details['foreign_key']:
self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id'))
break
else:
self.fail("No FK constraint for uniquetest_id found")
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@unittest.skipUnless(connection.features.supports_column_check_constraints, "No check constraints")
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no unique_together constraint to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
        # Ensure there's no index on the slug/title columns first
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no index_together to begin with
self.assertEqual(Book._meta.index_together, ())
        # Add the index_together constraint
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [['author', 'title']], [])
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c['columns'] == ["slug", "title"]
),
)
def test_db_table(self):
"""
Tests renaming of the table
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
# Ensure the table is there afterwards
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table again
with connection.schema_editor() as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
        # Add a unique column, verify that it creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertTrue(
self.get_indexes(Tag._meta.db_table)['id']['primary_key'],
)
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
'id',
self.get_indexes(Tag._meta.db_table),
)
self.assertTrue(
self.get_indexes(Tag._meta.db_table)['slug']['primary_key'],
)
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
        Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
        Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
        # Add a second FK; before the fix this would fail due to the long ref name
new_field = ForeignKey(AuthorWithEvenLongerName, related_name="something")
new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk")
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
def test_creation_deletion_reserved_names(self):
"""
        Tries creating a model's table, and then deleting it, when the table
        name is a SQL reserved word.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail("Errors when applying initial migration for a model "
"with a table named after a SQL reserved word: %s" % e)
# Check that it's there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# Check that it's gone
self.assertRaises(
DatabaseError,
lambda: list(Thing.objects.all()),
)
@unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, **kwargs):
kwargs['db_column'] = "CamelCase"
field = kwargs.pop('field_class', IntegerField)(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
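        # get_field() builds a throwaway field whose column is always
        # "CamelCase"; field_class switches the field type (e.g. ForeignKey).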
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
editor.execute(
editor.sql_create_index % {
"table": editor.quote_name(table),
"name": editor.quote_name("CamelCaseIndex"),
"columns": editor.quote_name(column),
"extra": "",
}
)
editor.alter_field(model, get_field(db_index=True), field)
editor.execute(
editor.sql_create_unique % {
"table": editor.quote_name(table),
"name": editor.quote_name("CamelCaseUniqConstraint"),
"columns": editor.quote_name(field.column),
}
)
editor.alter_field(model, get_field(unique=True), field)
editor.execute(
editor.sql_create_fk % {
"table": editor.quote_name(table),
"name": editor.quote_name("CamelCaseFKConstraint"),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
}
)
editor.alter_field(model, get_field(Author, field_class=ForeignKey), field)
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
        # Add a new CharField to ensure its effective_default() is used as the column default
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '')
|
michalkurka/h2o-3
|
refs/heads/master
|
h2o-py/tests/testdir_misc/pyunit_melt.py
|
2
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Pyunit for h2o.melt"""
from __future__ import print_function
import sys
sys.path.insert(1,"../../")
import pandas as pd
from h2o.frame import H2OFrame
from tests import pyunit_utils
def melt_compare(df, **kwargs):
frozen_h2o = H2OFrame(df)
melted_h2o = frozen_h2o.melt(**kwargs)
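    # H2O does not guarantee the same row order as pandas, so both frames are
    # sorted identically (by id_vars plus the variable column) before comparing.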
def sort(f):
        var_name = kwargs.get("var_name", "variable")
return f.sort_values(by=kwargs["id_vars"]+[var_name]).reset_index(drop=True)
actual = sort(melted_h2o.as_data_frame())
expected = sort(pd.melt(df, **kwargs))
assert expected.equals(actual)
def test_melt():
df = pd.DataFrame({'A': {0: 'a', 1: 'b', 2: 'c'},
'B': {0: 1, 2: 5},
'C': {0: 2, 1: 4, 2: 6}})
melt_compare(df, id_vars=["A"], value_vars=["B"])
melt_compare(df, id_vars=["A"], value_vars=["B", "C"])
melt_compare(df, id_vars=["A"])
melt_compare(df, id_vars=["A", "B"], value_vars=["C"])
melt_compare(df, id_vars=["A"], value_vars=["B"], var_name="test_VARIABLE", value_name="test_VALUE")
if __name__ == "__main__":
pyunit_utils.standalone_test(test_melt)
else:
test_melt()
|
edx/xblock-sdk
|
refs/heads/master
|
setup.py
|
2
|
"""Set up for XBlock SDK"""
import os
import os.path
from setuptools import setup
def find_package_data(pkg, data_paths):
"""Generic function to find package_data for `pkg` under `root`."""
data = []
for data_path in data_paths:
package_dir = pkg.replace(".", "/")
for dirname, _, files in os.walk(package_dir + "/" + data_path):
for fname in files:
data.append(os.path.relpath(os.path.join(dirname, fname), package_dir))
return {pkg: data}
def is_requirement(line):
"""
Return True if the requirement line is a package requirement;
    that is, it is not blank, a comment, a requirements-file include,
    an editable install, or a VCS reference.
"""
# Remove whitespace at the start/end of the line
line = line.strip()
    # Skip blank lines, comments, includes, editable installs, and VCS references
return not (
line == '' or
line.startswith('-r') or
line.startswith('#') or
line.startswith('-e') or
line.startswith('git+')
)
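# Illustrative behavior, derived from the checks above:
#   is_requirement("Django>=2.2")  -> True
#   is_requirement("# a comment")  -> False
#   is_requirement("-e .")         -> False
#   is_requirement("-r base.txt")  -> False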
def load_requirements(*requirements_paths):
"""
Load all requirements from the specified requirements files.
Returns a list of requirement strings.
"""
requirements = set()
    for path in requirements_paths:
        with open(path) as reqs:
            requirements.update(
                line.strip() for line in reqs
                if is_requirement(line)
            )
return list(requirements)
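# Example usage (hypothetical path), producing a deduplicated list of
# requirement strings suitable for setup():
#   install_requires = load_requirements('requirements/base.txt')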
package_data = {} # pylint: disable=invalid-name
package_data.update(find_package_data("sample_xblocks.basic", ["public", "templates"]))
package_data.update(find_package_data("sample_xblocks.thumbs", ["static"]))
package_data.update(find_package_data("sample_xblocks.filethumbs", ["static"]))
package_data.update(find_package_data("workbench", ["static", "templates", "test"]))
setup(
name='xblock-sdk',
version='0.3.0',
description='XBlock SDK',
packages=[
'sample_xblocks',
'sample_xblocks.basic',
'sample_xblocks.thumbs',
'sample_xblocks.filethumbs',
'workbench',
],
install_requires=[
"Django>=2.2",
],
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
],
tests_require=load_requirements(f'{os.getcwd()}/requirements/test.txt'),
entry_points={
'xblock.v1': [
# Basic XBlocks
'helloworld_demo = sample_xblocks.basic.content:HelloWorldBlock',
'allscopes_demo = sample_xblocks.basic.content:AllScopesBlock',
'html_demo = sample_xblocks.basic.content:HtmlBlock',
'sequence_demo = sample_xblocks.basic.structure:Sequence',
'vertical_demo = sample_xblocks.basic.structure:VerticalBlock',
'sidebar_demo = sample_xblocks.basic.structure:SidebarBlock',
'problem_demo = sample_xblocks.basic.problem:ProblemBlock',
'textinput_demo = sample_xblocks.basic.problem:TextInputBlock',
'equality_demo = sample_xblocks.basic.problem:EqualityCheckerBlock',
'attempts_scoreboard_demo = sample_xblocks.basic.problem:AttemptsScoreboardBlock',
'slider_demo = sample_xblocks.basic.slider:Slider',
'view_counter_demo = sample_xblocks.basic.view_counter:ViewCounter',
# Thumbs example
'thumbs = sample_xblocks.thumbs:ThumbsBlock',
'filethumbs = sample_xblocks.filethumbs:FileThumbsBlock',
# Workbench specific
'debugchild = workbench.blocks:DebuggingChildBlock',
],
'xblock_asides.v1': [
# ThumbsAside example. Asides aren't ready yet, so we'll disable
# this for now. When we get back to working on asides, we'll come
# up with a more sophisticated mechanism to enable this for the
# developers that want to see it.
# 'thumbs_aside = sample_xblocks.thumbs:ThumbsAside',
]
},
package_data=package_data,
)
|
Delmonte3161/heroes-parser
|
refs/heads/master
|
s2protocol/protocol19458.py
|
28
|
# Copyright (c) 2013 Blizzard Entertainment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from decoders import *
# Decoding instructions for each protocol type.
typeinfos = [
('_int',[(0,7)]), #0
('_int',[(0,4)]), #1
('_int',[(0,6)]), #2
('_int',[(0,14)]), #3
('_int',[(0,22)]), #4
('_int',[(0,32)]), #5
('_choice',[(0,2),{0:('m_uint6',2),1:('m_uint14',3),2:('m_uint22',4),3:('m_uint32',5)}]), #6
('_int',[(0,5)]), #7
('_struct',[[('m_playerId',7,-1)]]), #8
('_blob',[(0,8)]), #9
('_int',[(0,8)]), #10
('_struct',[[('m_flags',10,0),('m_major',10,1),('m_minor',10,2),('m_revision',10,3),('m_build',5,4),('m_baseBuild',5,5)]]), #11
('_int',[(0,3)]), #12
('_struct',[[('m_signature',9,0),('m_version',11,1),('m_type',12,2),('m_elapsedGameLoops',5,3)]]), #13
('_fourcc',[]), #14
('_blob',[(0,7)]), #15
('_int',[(0,64)]), #16
('_struct',[[('m_region',10,0),('m_programId',14,1),('m_realm',5,2),('m_name',15,3),('m_id',16,4)]]), #17
('_struct',[[('m_a',10,0),('m_r',10,1),('m_g',10,2),('m_b',10,3)]]), #18
('_int',[(0,2)]), #19
('_struct',[[('m_name',9,0),('m_toon',17,1),('m_race',9,2),('m_color',18,3),('m_control',10,4),('m_teamId',1,5),('m_handicap',0,6),('m_observe',19,7),('m_result',19,8)]]), #20
('_array',[(0,5),20]), #21
('_optional',[21]), #22
('_blob',[(0,10)]), #23
('_blob',[(0,11)]), #24
('_struct',[[('m_file',24,0)]]), #25
('_bool',[]), #26
('_int',[(-9223372036854775808,64)]), #27
('_blob',[(0,12)]), #28
('_blob',[(40,0)]), #29
('_array',[(0,4),29]), #30
('_optional',[30]), #31
('_struct',[[('m_playerList',22,0),('m_title',23,1),('m_difficulty',9,2),('m_thumbnail',25,3),('m_isBlizzardMap',26,4),('m_timeUTC',27,5),('m_timeLocalOffset',27,6),('m_description',28,7),('m_imageFilePath',24,8),('m_mapFileName',24,9),('m_cacheHandles',31,10),('m_miniSave',26,11),('m_gameSpeed',12,12),('m_defaultDifficulty',2,13)]]), #32
('_optional',[10]), #33
('_struct',[[('m_race',33,-1)]]), #34
('_struct',[[('m_team',33,-1)]]), #35
('_struct',[[('m_name',9,-7),('m_randomSeed',5,-6),('m_racePreference',34,-5),('m_teamPreference',35,-4),('m_testMap',26,-3),('m_testAuto',26,-2),('m_observe',19,-1)]]), #36
('_array',[(0,5),36]), #37
('_struct',[[('m_lockTeams',26,-11),('m_teamsTogether',26,-10),('m_advancedSharedControl',26,-9),('m_randomRaces',26,-8),('m_battleNet',26,-7),('m_amm',26,-6),('m_ranked',26,-5),('m_noVictoryOrDefeat',26,-4),('m_fog',19,-3),('m_observers',19,-2),('m_userDifficulty',19,-1)]]), #38
('_int',[(1,4)]), #39
('_int',[(1,8)]), #40
('_bitarray',[(0,6)]), #41
('_bitarray',[(0,8)]), #42
('_bitarray',[(0,2)]), #43
('_struct',[[('m_allowedColors',41,-5),('m_allowedRaces',42,-4),('m_allowedDifficulty',41,-3),('m_allowedControls',42,-2),('m_allowedObserveTypes',43,-1)]]), #44
('_array',[(0,5),44]), #45
('_struct',[[('m_randomValue',5,-23),('m_gameCacheName',23,-22),('m_gameOptions',38,-21),('m_gameSpeed',12,-20),('m_gameType',12,-19),('m_maxUsers',7,-18),('m_maxObservers',7,-17),('m_maxPlayers',7,-16),('m_maxTeams',39,-15),('m_maxColors',2,-14),('m_maxRaces',40,-13),('m_maxControls',40,-12),('m_mapSizeX',10,-11),('m_mapSizeY',10,-10),('m_mapFileSyncChecksum',5,-9),('m_mapFileName',24,-8),('m_mapAuthorName',9,-7),('m_modFileSyncChecksum',5,-6),('m_slotDescriptions',45,-5),('m_defaultDifficulty',2,-4),('m_cacheHandles',30,-3),('m_isBlizzardMap',26,-2),('m_isPremadeFFA',26,-1)]]), #46
('_optional',[1]), #47
('_optional',[7]), #48
('_struct',[[('m_color',48,-1)]]), #49
('_array',[(0,5),5]), #50
('_array',[(0,9),5]), #51
('_struct',[[('m_control',10,-11),('m_userId',47,-10),('m_teamId',1,-9),('m_colorPref',49,-8),('m_racePref',34,-7),('m_difficulty',2,-6),('m_handicap',0,-5),('m_observe',19,-4),('m_rewards',50,-3),('m_toonHandle',15,-2),('m_licenses',51,-1)]]), #52
('_array',[(0,5),52]), #53
('_struct',[[('m_phase',12,-9),('m_maxUsers',7,-8),('m_maxObservers',7,-7),('m_slots',53,-6),('m_randomSeed',5,-5),('m_hostUserId',47,-4),('m_isSinglePlayer',26,-3),('m_gameDuration',5,-2),('m_defaultDifficulty',2,-1)]]), #54
('_struct',[[('m_userInitialData',37,-3),('m_gameDescription',46,-2),('m_lobbyState',54,-1)]]), #55
('_struct',[[('m_syncLobbyState',55,-1)]]), #56
('_struct',[[('m_name',15,-1)]]), #57
('_blob',[(0,6)]), #58
('_struct',[[('m_name',58,-1)]]), #59
('_struct',[[('m_name',58,-3),('m_type',5,-2),('m_data',15,-1)]]), #60
('_struct',[[('m_type',5,-3),('m_name',58,-2),('m_data',28,-1)]]), #61
('_array',[(0,5),10]), #62
('_struct',[[('m_signature',62,-1)]]), #63
('_struct',[[('m_developmentCheatsEnabled',26,-4),('m_multiplayerCheatsEnabled',26,-3),('m_syncChecksummingEnabled',26,-2),('m_isMapToMapTransition',26,-1)]]), #64
('_struct',[[]]), #65
('_struct',[[('m_fileName',24,-5),('m_automatic',26,-4),('m_overwrite',26,-3),('m_name',9,-2),('m_description',23,-1)]]), #66
('_int',[(-2147483648,32)]), #67
('_struct',[[('x',67,-2),('y',67,-1)]]), #68
('_struct',[[('m_point',68,-4),('m_time',67,-3),('m_verb',23,-2),('m_arguments',23,-1)]]), #69
('_struct',[[('m_data',69,-1)]]), #70
('_int',[(0,18)]), #71
('_int',[(0,16)]), #72
('_struct',[[('m_abilLink',72,-3),('m_abilCmdIndex',7,-2),('m_abilCmdData',33,-1)]]), #73
('_optional',[73]), #74
('_null',[]), #75
('_int',[(0,20)]), #76
('_struct',[[('x',76,-3),('y',76,-2),('z',67,-1)]]), #77
('_struct',[[('m_targetUnitFlags',10,-7),('m_timer',10,-6),('m_tag',5,-5),('m_snapshotUnitLink',72,-4),('m_snapshotControlPlayerId',47,-3),('m_snapshotUpkeepPlayerId',47,-2),('m_snapshotPoint',77,-1)]]), #78
('_choice',[(0,2),{0:('None',75),1:('TargetPoint',77),2:('TargetUnit',78),3:('Data',5)}]), #79
('_optional',[5]), #80
('_struct',[[('m_cmdFlags',71,-4),('m_abil',74,-3),('m_data',79,-2),('m_otherUnit',80,-1)]]), #81
('_array',[(0,8),10]), #82
('_choice',[(0,2),{0:('None',75),1:('Mask',42),2:('OneIndices',82),3:('ZeroIndices',82)}]), #83
('_struct',[[('m_unitLink',72,-3),('m_intraSubgroupPriority',10,-2),('m_count',10,-1)]]), #84
('_array',[(0,8),84]), #85
('_array',[(0,8),5]), #86
('_struct',[[('m_subgroupIndex',10,-4),('m_removeMask',83,-3),('m_addSubgroups',85,-2),('m_addUnitTags',86,-1)]]), #87
('_struct',[[('m_controlGroupId',1,-2),('m_delta',87,-1)]]), #88
('_struct',[[('m_controlGroupIndex',1,-3),('m_controlGroupUpdate',19,-2),('m_mask',83,-1)]]), #89
('_struct',[[('m_count',10,-6),('m_subgroupCount',10,-5),('m_activeSubgroupIndex',10,-4),('m_unitTagsChecksum',5,-3),('m_subgroupIndicesChecksum',5,-2),('m_subgroupsChecksum',5,-1)]]), #90
('_struct',[[('m_controlGroupId',1,-2),('m_selectionSyncData',90,-1)]]), #91
('_array',[(0,3),67]), #92
('_struct',[[('m_recipientId',1,-2),('m_resources',92,-1)]]), #93
('_struct',[[('m_chatMessage',23,-1)]]), #94
('_int',[(-128,8)]), #95
('_struct',[[('x',67,-3),('y',67,-2),('z',67,-1)]]), #96
('_struct',[[('m_beacon',95,-8),('m_ally',95,-7),('m_autocast',95,-6),('m_targetUnitTag',5,-5),('m_targetUnitSnapshotUnitLink',72,-4),('m_targetUnitSnapshotUpkeepPlayerId',47,-3),('m_targetUnitSnapshotControlPlayerId',47,-2),('m_targetPoint',96,-1)]]), #97
('_struct',[[('m_speed',12,-1)]]), #98
('_struct',[[('m_delta',95,-1)]]), #99
('_struct',[[('m_verb',23,-2),('m_arguments',23,-1)]]), #100
('_struct',[[('m_alliance',5,-2),('m_control',5,-1)]]), #101
('_struct',[[('m_unitTag',5,-1)]]), #102
('_struct',[[('m_unitTag',5,-2),('m_flags',10,-1)]]), #103
('_struct',[[('m_conversationId',67,-2),('m_replyId',67,-1)]]), #104
('_struct',[[('m_purchaseItemId',67,-1)]]), #105
('_struct',[[('m_difficultyLevel',67,-1)]]), #106
('_choice',[(0,3),{0:('None',75),1:('Checked',26),2:('ValueChanged',5),3:('SelectionChanged',67),4:('TextChanged',24)}]), #107
('_struct',[[('m_controlId',67,-3),('m_eventType',67,-2),('m_eventData',107,-1)]]), #108
('_struct',[[('m_soundHash',5,-2),('m_length',5,-1)]]), #109
('_struct',[[('m_soundHash',86,-2),('m_length',86,-1)]]), #110
('_struct',[[('m_syncInfo',110,-1)]]), #111
('_struct',[[('m_sound',5,-1)]]), #112
('_struct',[[('m_transmissionId',67,-1)]]), #113
('_struct',[[('x',72,-2),('y',72,-1)]]), #114
('_optional',[72]), #115
('_struct',[[('m_target',114,-4),('m_distance',115,-3),('m_pitch',115,-2),('m_yaw',115,-1)]]), #116
('_int',[(0,1)]), #117
('_struct',[[('m_skipType',117,-1)]]), #118
('_int',[(0,11)]), #119
('_struct',[[('x',119,-2),('y',119,-1)]]), #120
('_struct',[[('m_button',5,-4),('m_down',26,-3),('m_posUI',120,-2),('m_posWorld',77,-1)]]), #121
('_struct',[[('m_posUI',120,-2),('m_posWorld',77,-1)]]), #122
('_struct',[[('m_soundtrack',5,-1)]]), #123
('_struct',[[('m_planetId',67,-1)]]), #124
('_struct',[[('m_key',95,-2),('m_flags',95,-1)]]), #125
('_struct',[[('m_resources',92,-1)]]), #126
('_struct',[[('m_fulfillRequestId',67,-1)]]), #127
('_struct',[[('m_cancelRequestId',67,-1)]]), #128
('_struct',[[('m_researchItemId',67,-1)]]), #129
('_struct',[[('m_laggingPlayerId',1,-1)]]), #130
('_struct',[[('m_mercenaryId',67,-1)]]), #131
('_struct',[[('m_battleReportId',67,-2),('m_difficultyLevel',67,-1)]]), #132
('_struct',[[('m_battleReportId',67,-1)]]), #133
('_int',[(0,19)]), #134
('_struct',[[('m_decrementMs',134,-1)]]), #135
('_struct',[[('m_portraitId',67,-1)]]), #136
('_struct',[[('m_functionName',15,-1)]]), #137
('_struct',[[('m_result',67,-1)]]), #138
('_struct',[[('m_gameMenuItemIndex',67,-1)]]), #139
('_struct',[[('m_reason',95,-1)]]), #140
('_struct',[[('m_purchaseCategoryId',67,-1)]]), #141
('_struct',[[('m_button',72,-1)]]), #142
('_struct',[[('m_recipient',19,-2),('m_string',24,-1)]]), #143
('_struct',[[('m_recipient',19,-2),('m_point',68,-1)]]), #144
('_struct',[[('m_progress',67,-1)]]), #145
]
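# Each typeinfo pairs a decoder method with its arguments: e.g. entry #0,
# ('_int',[(0,7)]), reads a 7-bit integer with offset 0, and entry #8 reads a
# struct whose single field m_playerId is decoded with typeid 7. (This reading
# follows the usual s2protocol decoder conventions; see decoders.py.)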
# Map from protocol NNet.Game.*Event eventid to (typeid, name)
game_event_types = {
5: (65, 'NNet.Game.SUserFinishedLoadingSyncEvent'),
7: (57, 'NNet.Game.SBankFileEvent'),
8: (59, 'NNet.Game.SBankSectionEvent'),
9: (60, 'NNet.Game.SBankKeyEvent'),
10: (61, 'NNet.Game.SBankValueEvent'),
11: (63, 'NNet.Game.SBankSignatureEvent'),
12: (64, 'NNet.Game.SUserOptionsEvent'),
22: (66, 'NNet.Game.SSaveGameEvent'),
23: (65, 'NNet.Game.SSaveGameDoneEvent'),
25: (65, 'NNet.Game.SPlayerLeaveEvent'),
26: (70, 'NNet.Game.SGameCheatEvent'),
27: (81, 'NNet.Game.SCmdEvent'),
28: (88, 'NNet.Game.SSelectionDeltaEvent'),
29: (89, 'NNet.Game.SControlGroupUpdateEvent'),
30: (91, 'NNet.Game.SSelectionSyncCheckEvent'),
31: (93, 'NNet.Game.SResourceTradeEvent'),
32: (94, 'NNet.Game.STriggerChatMessageEvent'),
33: (97, 'NNet.Game.SAICommunicateEvent'),
34: (98, 'NNet.Game.SSetAbsoluteGameSpeedEvent'),
35: (99, 'NNet.Game.SAddAbsoluteGameSpeedEvent'),
37: (100, 'NNet.Game.SBroadcastCheatEvent'),
38: (101, 'NNet.Game.SAllianceEvent'),
39: (102, 'NNet.Game.SUnitClickEvent'),
40: (103, 'NNet.Game.SUnitHighlightEvent'),
41: (104, 'NNet.Game.STriggerReplySelectedEvent'),
44: (65, 'NNet.Game.STriggerSkippedEvent'),
45: (109, 'NNet.Game.STriggerSoundLengthQueryEvent'),
46: (112, 'NNet.Game.STriggerSoundOffsetEvent'),
47: (113, 'NNet.Game.STriggerTransmissionOffsetEvent'),
48: (113, 'NNet.Game.STriggerTransmissionCompleteEvent'),
49: (116, 'NNet.Game.SCameraUpdateEvent'),
50: (65, 'NNet.Game.STriggerAbortMissionEvent'),
51: (105, 'NNet.Game.STriggerPurchaseMadeEvent'),
52: (65, 'NNet.Game.STriggerPurchaseExitEvent'),
53: (106, 'NNet.Game.STriggerPlanetMissionLaunchedEvent'),
54: (65, 'NNet.Game.STriggerPlanetPanelCanceledEvent'),
55: (108, 'NNet.Game.STriggerDialogControlEvent'),
56: (111, 'NNet.Game.STriggerSoundLengthSyncEvent'),
57: (118, 'NNet.Game.STriggerConversationSkippedEvent'),
58: (121, 'NNet.Game.STriggerMouseClickedEvent'),
59: (122, 'NNet.Game.STriggerMouseMovedEvent'),
63: (65, 'NNet.Game.STriggerPlanetPanelReplayEvent'),
64: (123, 'NNet.Game.STriggerSoundtrackDoneEvent'),
65: (124, 'NNet.Game.STriggerPlanetMissionSelectedEvent'),
66: (125, 'NNet.Game.STriggerKeyPressedEvent'),
67: (137, 'NNet.Game.STriggerMovieFunctionEvent'),
68: (65, 'NNet.Game.STriggerPlanetPanelBirthCompleteEvent'),
69: (65, 'NNet.Game.STriggerPlanetPanelDeathCompleteEvent'),
70: (126, 'NNet.Game.SResourceRequestEvent'),
71: (127, 'NNet.Game.SResourceRequestFulfillEvent'),
72: (128, 'NNet.Game.SResourceRequestCancelEvent'),
73: (65, 'NNet.Game.STriggerResearchPanelExitEvent'),
74: (65, 'NNet.Game.STriggerResearchPanelPurchaseEvent'),
75: (129, 'NNet.Game.STriggerResearchPanelSelectionChangedEvent'),
76: (130, 'NNet.Game.SLagMessageEvent'),
77: (65, 'NNet.Game.STriggerMercenaryPanelExitEvent'),
78: (65, 'NNet.Game.STriggerMercenaryPanelPurchaseEvent'),
79: (131, 'NNet.Game.STriggerMercenaryPanelSelectionChangedEvent'),
80: (65, 'NNet.Game.STriggerVictoryPanelExitEvent'),
81: (65, 'NNet.Game.STriggerBattleReportPanelExitEvent'),
82: (132, 'NNet.Game.STriggerBattleReportPanelPlayMissionEvent'),
83: (133, 'NNet.Game.STriggerBattleReportPanelPlaySceneEvent'),
84: (133, 'NNet.Game.STriggerBattleReportPanelSelectionChangedEvent'),
85: (106, 'NNet.Game.STriggerVictoryPanelPlayMissionAgainEvent'),
86: (65, 'NNet.Game.STriggerMovieStartedEvent'),
87: (65, 'NNet.Game.STriggerMovieFinishedEvent'),
88: (135, 'NNet.Game.SDecrementGameTimeRemainingEvent'),
89: (136, 'NNet.Game.STriggerPortraitLoadedEvent'),
90: (138, 'NNet.Game.STriggerCustomDialogDismissedEvent'),
91: (139, 'NNet.Game.STriggerGameMenuItemSelectedEvent'),
92: (140, 'NNet.Game.STriggerCameraMoveEvent'),
93: (105, 'NNet.Game.STriggerPurchasePanelSelectedPurchaseItemChangedEvent'),
94: (141, 'NNet.Game.STriggerPurchasePanelSelectedPurchaseCategoryChangedEvent'),
95: (142, 'NNet.Game.STriggerButtonPressedEvent'),
96: (65, 'NNet.Game.STriggerGameCreditsFinishedEvent'),
}
# The typeid of the NNet.Game.EEventId enum.
game_eventid_typeid = 0
# Map from protocol NNet.Game.*Message eventid to (typeid, name)
message_event_types = {
0: (143, 'NNet.Game.SChatMessage'),
1: (144, 'NNet.Game.SPingMessage'),
2: (145, 'NNet.Game.SLoadingProgressMessage'),
3: (65, 'NNet.Game.SServerPingMessage'),
}
# The typeid of the NNet.Game.EMessageId enum.
message_eventid_typeid = 1
# The typeid of NNet.SVarUint32 (the type used to encode gameloop deltas).
svaruint32_typeid = 6
# The typeid of NNet.Replay.SPlayerId (the type used to encode player ids).
replay_playerid_typeid = 8
# The typeid of NNet.Replay.SHeader (the type used to store replay game version and length).
replay_header_typeid = 13
# The typeid of NNet.Game.SDetails (the type used to store overall replay details).
game_details_typeid = 32
# The typeid of NNet.Replay.SInitData (the type used to store the initial lobby).
replay_initdata_typeid = 56
def _varuint32_value(value):
# Returns the numeric value from a SVarUint32 instance.
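    # Illustratively, the instance is a one-entry dict keyed by the width that
    # was actually used, e.g. {'m_uint6': 12} or {'m_uint22': 60000}.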
    for k, v in value.iteritems():
return v
return 0
def _decode_event_stream(decoder, eventid_typeid, event_types, decode_player_id):
# Decodes events prefixed with a gameloop and possibly userid
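    # Each yielded event is the decoded struct augmented with bookkeeping
    # keys, roughly (values illustrative):
    #   {'_event': 'NNet.Game.SCmdEvent', '_eventid': 27, '_gameloop': 1234,
    #    '_playerid': {'m_playerId': 1}, '_bits': 160, ...struct fields...}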
gameloop = 0
while not decoder.done():
start_bits = decoder.used_bits()
# decode the gameloop delta before each event
delta = _varuint32_value(decoder.instance(svaruint32_typeid))
gameloop += delta
# decode the userid before each event
if decode_player_id:
playerid = decoder.instance(replay_playerid_typeid)
# decode the event id
eventid = decoder.instance(eventid_typeid)
typeid, typename = event_types.get(eventid, (None, None))
if typeid is None:
raise CorruptedError('eventid(%d) at %s' % (eventid, decoder))
# decode the event struct instance
event = decoder.instance(typeid)
event['_event'] = typename
event['_eventid'] = eventid
# insert gameloop and userid
event['_gameloop'] = gameloop
if decode_player_id:
event['_playerid'] = playerid
# the next event is byte aligned
decoder.byte_align()
# insert bits used in stream
event['_bits'] = decoder.used_bits() - start_bits
yield event
def decode_replay_game_events(contents):
"""Decodes and yields each game event from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
game_eventid_typeid,
game_event_types,
decode_player_id=True):
yield event
def decode_replay_message_events(contents):
"""Decodes and yields each message event from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
message_eventid_typeid,
message_event_types,
decode_player_id=True):
yield event
def decode_replay_header(contents):
"""Decodes and return the replay header from the contents byte string."""
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(replay_header_typeid)
def decode_replay_details(contents):
"""Decodes and returns the game details from the contents byte string."""
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(game_details_typeid)
def decode_replay_initdata(contents):
"""Decodes and return the replay init data from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
return decoder.instance(replay_initdata_typeid)
def decode_replay_attributes_events(contents):
"""Decodes and yields each attribute from the contents byte string."""
buffer = BitPackedBuffer(contents, 'little')
attributes = {}
if not buffer.done():
attributes['source'] = buffer.read_bits(8)
attributes['mapNamespace'] = buffer.read_bits(32)
count = buffer.read_bits(32)
attributes['scopes'] = {}
while not buffer.done():
value = {}
value['namespace'] = buffer.read_bits(32)
value['attrid'] = attrid = buffer.read_bits(32)
scope = buffer.read_bits(8)
value['value'] = buffer.read_aligned_bytes(4)[::-1].strip('\x00')
            if scope not in attributes['scopes']:
attributes['scopes'][scope] = {}
            if attrid not in attributes['scopes'][scope]:
attributes['scopes'][scope][attrid] = []
attributes['scopes'][scope][attrid].append(value)
return attributes
def unit_tag(unitTagIndex, unitTagRecycle):
return (unitTagIndex << 18) + unitTagRecycle
def unit_tag_index(unitTag):
return (unitTag >> 18) & 0x00003fff
def unit_tag_recycle(unitTag):
return (unitTag) & 0x0003ffff
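# Illustrative round trip: unit_tag(5, 3) == (5 << 18) + 3 == 1310723;
# unit_tag_index(1310723) == 5 and unit_tag_recycle(1310723) == 3. Given the
# masks above, this holds while the index fits in 14 bits and the recycle
# counter in 18 bits.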
|
skoczen/skoczen
|
refs/heads/master
|
project/apps/manual/migrations/0015_adds_lunar_day.py
|
1
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'manual.action': {
'Meta': {'ordering': "('name',)", 'object_name': 'Action'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'one_liner': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '210', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'manual.datasensitivity': {
'Meta': {'object_name': 'DataSensitivity'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'manual.emotion': {
'Meta': {'ordering': "('name',)", 'object_name': 'Emotion'},
'cause': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'helpful': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'one_liner': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '210', 'null': 'True', 'blank': 'True'}),
'symptoms': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'manual.gutterbumper': {
'Meta': {'ordering': "('date',)", 'object_name': 'GutterBumper'},
'actions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['manual.Action']", 'null': 'True', 'blank': 'True'}),
'alone_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'body_fat_percent': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'burnt_out': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'burnt_out_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'creativity': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'creativity_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)'}),
'emotions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['manual.Emotion']", 'null': 'True', 'blank': 'True'}),
'fell_asleep_at': ('django.db.models.fields.TimeField', [], {'default': 'datetime.time(0, 0)'}),
'friend_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'happiness': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'happiness_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inbox_zero': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'interacted_with_art': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'left_the_house': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'meditated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moon_phase': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'morning_mood': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'morning_mood_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'nature_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'notes': ('django.db.models.fields.TextField', [], {'default': "'86400'", 'null': 'True', 'blank': 'True'}),
'number_of_fun_beers': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_sleep_beers': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'off': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'presence': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'presence_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'public_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'relationship_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sleep_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'travelling_or_out_of_routine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'unbusy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'unbusy_set': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'woke_up_at': ('django.db.models.fields.TimeField', [], {'default': 'datetime.time(8, 30)'}),
'work_hrs': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'worked_out': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'manual.monthlycheckin': {
'Meta': {'object_name': 'MonthlyCheckin'},
'closer_to_two_year_plan_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'closer_to_two_year_plan_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'enough_time_alone_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enough_time_alone_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'enough_time_in_nature_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enough_time_in_nature_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'finances_on_track_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'finances_on_track_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'getting_out_enough_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'getting_out_enough_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'habit_success_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'habit_success_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'habits_for_next_month': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'habits_from_last_month': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'happiness_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'happiness_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'have_a_space_that_is_just_mine_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'have_a_space_that_is_just_mine_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'healthy_activity_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'healthy_activity_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'healthy_drinking_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'healthy_drinking_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'healthy_eating_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'healthy_eating_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_touch_with_spirtuality_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'in_touch_with_spirtuality_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'making_the_world_a_bit_better_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'making_the_world_a_bit_better_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'making_things_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'making_things_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'month_start_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)'}),
'presence_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'presence_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relationship_health_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'relationship_health_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'sex_life_is_good_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sex_life_is_good_rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'what_is_the_identity_story_i_am_telling': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'what_is_the_relationship_story_i_am_telling': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'what_is_the_work_story_i_am_telling': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'manual.restaurant': {
'Meta': {'object_name': 'Restaurant'},
'date_went': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'have_gone': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meal': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'manual.value': {
'Meta': {'object_name': 'Value'},
'explanation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '210', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'manual.weeklymeal': {
'Meta': {'object_name': 'WeeklyMeal'},
'how_it_went': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ingredients': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'preparation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'week_start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'manual.weight': {
'Meta': {'ordering': "('-when',)", 'object_name': 'Weight'},
'body_fat_percent': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)'})
},
u'manual.workout': {
'Meta': {'object_name': 'Workout'},
'explanation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '210', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['manual']
symmetrical = True
|
ASIX-ALS/asix-final-project-frontend
|
refs/heads/develop
|
node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
|
1509
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
path, sdk_based, default_toolset=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
self.project_version = project_version
self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
def ShortName(self):
return self.short_name
def Description(self):
"""Get the full description of the version."""
return self.description
def SolutionVersion(self):
"""Get the version number of the sln files."""
return self.solution_version
def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version
def FlatSolution(self):
return self.flat_sln
def UsesVcxproj(self):
"""Returns true if this version uses a vcxproj file."""
return self.uses_vcxproj
def ProjectExtension(self):
"""Returns the file extension for the project."""
    return '.vcxproj' if self.uses_vcxproj else '.vcproj'
def Path(self):
"""Returns the path to Visual Studio installation."""
return self.path
def ToolPath(self, tool):
"""Returns the path to a given compiler tool. """
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
of a user override."""
return self.default_toolset
def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 and later, non-Express have a x64-x86 cross that we want
# to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
# Use the 64-on-64 compiler if we're not using an express
# edition and we're running on a 64bit OS.
if self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
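# A minimal sketch of what SetupScript resolves to for a hypothetical
# non-SDK VS 2013 install (the path and host architecture are assumptions):
#
#   vs = VisualStudioVersion('2013', 'Visual Studio 2013',
#                            solution_version='13.00', project_version='12.0',
#                            flat_sln=False, uses_vcxproj=True,
#                            path=r'C:\VS12', sdk_based=False,
#                            default_toolset='v120')
#   vs.SetupScript('x64')
#   # -> ['C:\\VS12\\VC\\vcvarsall.bat', 'amd64'] on a 64-bit host, or
#   #    [..., 'x86_amd64'] when cross-compiling from a 32-bit host.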
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up, and on Windows Server 2003 and XP through KB patch 942589. Note that
  Sysnative always fails under 64-bit Python, since it is a virtual directory
  visible only to 32-bit processes; in that case System32 works correctly in
  the first place.
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
Arguments:
key: The registry key.
value: The particular registry value to read (optional).
Return:
stdout from reg.exe, or None for failure.
"""
text = None
try:
text = _RegistryQueryBase('Sysnative', key, value)
except OSError, e:
if e.errno == errno.ENOENT:
text = _RegistryQueryBase('System32', key, value)
else:
raise
return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
"""Use _winreg or reg.exe to obtain the value of a registry key.
Using _winreg is preferable because it solves an issue on some corporate
environments where access to reg.exe is locked down. However, we still need
to fallback to reg.exe for the case where the _winreg module is not available
(for example in cygwin python).
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure.
"""
try:
return _RegistryGetValueUsingWinReg(key, value)
except ImportError:
pass
# Fallback to reg.exe if we fail to import _winreg.
text = _RegistryQuery(key, value)
if not text:
return None
# Extract value.
match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
if not match:
return None
return match.group(1)
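# For reference, the regex above pulls the data column out of reg.exe output.
# Given a line such as (sample output, values hypothetical):
#
#   '    InstallDir    REG_SZ    C:\\VS12\\Common7\\IDE\\\r\n'
#
# match.group(1) yields 'C:\\VS12\\Common7\\IDE\\'.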
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
  Setup is based on the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version
  is passed in that doesn't match a value in versions, Python will throw an
  error.
"""
if path:
path = os.path.normpath(path)
versions = {
'2015': VisualStudioVersion('2015',
'Visual Studio 2015',
solution_version='12.00',
project_version='14.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v140'),
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2013e': VisualStudioVersion('2013e',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2012e': VisualStudioVersion('2012e',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2010': VisualStudioVersion('2010',
'Visual Studio 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2008': VisualStudioVersion('2008',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2008e': VisualStudioVersion('2008e',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005': VisualStudioVersion('2005',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005e': VisualStudioVersion('2005e',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
}
return versions[str(name)]
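# Usage sketch (the install path is a placeholder):
#
#   v = _CreateVersion('2013e', r'C:\VS12')
#   v.ProjectExtension()   # -> '.vcxproj'
#   v.FlatSolution()       # -> True; Express editions use flat solutions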
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions.
Returns:
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-14 are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (12)
2015 - Visual Studio 2015 (14)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
'8.0': '2005',
'9.0': '2008',
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
'14.0': '2015',
}
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
# We don't use the 2010-encouraged-way because we also want to get the
# path to the binaries, which it doesn't offer.
keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Microsoft\VCExpress\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for key in keys:
      path = _RegistryGetValue(key, 'InstallDir')
if not path:
continue
path = _ConvertToCygpath(path)
# Check for full.
full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, '*express.exe')
if not force_express and os.path.exists(full_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express.
elif glob.glob(express_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for key in keys:
      path = _RegistryGetValue(key, version)
if not path:
continue
path = _ConvertToCygpath(path)
if version != '14.0': # There is no Express edition for 2015.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
"""Select which version of Visual Studio projects to generate.
Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
    allow_fallback: Whether a sensible default may be returned when no
        matching installation is found.
Returns:
An object representing a visual studio project format version.
"""
# In auto mode, check environment variable for override.
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
'2008e': ('9.0',),
'2010': ('10.0',),
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
'2015': ('14.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
if not allow_fallback:
raise ValueError('Could not locate Visual Studio installation.')
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
else:
return _CreateVersion(version, None)
return versions[0]
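# Callers usually steer this through the environment; a hedged sketch:
#
#   os.environ['GYP_MSVS_VERSION'] = '2013'  # force a particular version
#   version = SelectVisualStudioVersion()
#   print(version.Description())             # 'Visual Studio 2013' if found
#
# With version='auto' and allow_fallback=True, an empty detection result
# degrades to a default 2005 configuration rather than raising.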
|
wfxiang08/django185
|
refs/heads/master
|
tests/initial_sql_regress/models.py
|
77
|
"""
Regression tests for initial SQL insertion.
"""
from django.db import models
class Simple(models.Model):
name = models.CharField(max_length=50)
|
JNRowe/upoints
|
refs/heads/main
|
tests/test_osm.py
|
1
|
#
"""test_osm - Test osm support"""
# Copyright © 2012-2021 James Rowe <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of upoints.
#
# upoints is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# upoints is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# upoints. If not, see <http://www.gnu.org/licenses/>.
from operator import attrgetter
from pytest import mark
from upoints import point, utils
from upoints.osm import Node, Osm, Way, etree, get_area_url
from tests.utils import xml_compare, xml_str_compare
@mark.parametrize(
'size, results',
[
(
3,
(
-0.26486443825283734,
51.98800340214556,
-0.17713556174716266,
52.04199659785444,
),
),
(
12,
(
-0.3964574335910109,
51.907013608582226,
-0.04554256640898919,
52.12298639141776,
),
),
],
)
def test_get_area_url(size, results):
assert (
get_area_url(point.Point(52.015, -0.221), size)
== 'http://api.openstreetmap.org/api/0.5/map?bbox=%s,%s,%s,%s'
% results
)
class TestNode:
def setup(self):
self.bare = Node(0, 52, 0)
self.named = Node(
0, 52, 0, True, 'jnrowe', utils.Timestamp(2008, 1, 25)
)
self.tagged = Node(0, 52, 0, tags={'key': 'value'})
@mark.parametrize(
'node, result',
[
('bare', 'Node(0, 52.0, 0.0, False, None, None, None)'),
(
'named',
"Node(0, 52.0, 0.0, True, 'jnrowe', "
'Timestamp(2008, 1, 25, 0, 0), None)',
),
(
'tagged',
"Node(0, 52.0, 0.0, False, None, None, {'key': 'value'})",
),
],
)
def test___repr__(self, node, result):
assert repr(getattr(self, node)) == result
@mark.parametrize(
'node, result',
[
('bare', """Node 0 (52°00′00″N, 000°00′00″E)"""),
(
'named',
"""Node 0 (52°00′00″N, 000°00′00″E) [visible, user: jnrowe, """
'timestamp: 2008-01-25T00:00:00+00:00]',
),
('tagged', """Node 0 (52°00′00″N, 000°00′00″E) [key: value]"""),
],
)
def test___str__(self, node, result):
assert str(getattr(self, node)) == result
@mark.parametrize(
'node, result',
[
('bare', '<node id="0" lat="52.0" lon="0.0" visible="false"/>'),
(
'named',
'<node id="0" lat="52.0" lon="0.0" '
'timestamp="2008-01-25T00:00:00+00:00" user="jnrowe" '
'visible="true"/>',
),
(
'tagged',
'<node id="0" lat="52.0" lon="0.0" visible="false">'
'<tag k="key" v="value"/>'
'</node>',
),
],
)
def test_toosm(self, node, result):
xml_str_compare(result, etree.tostring(getattr(self, node).toosm()))
@mark.parametrize(
'size, results',
[
(
3,
(
-0.04384973831146972,
51.97300340214557,
0.04384973831146972,
52.02699659785445,
),
),
(
12,
(
-0.1753986342770412,
51.892013608582225,
0.1753986342770412,
52.10798639141778,
),
),
],
)
def test_get_area_url(self, size, results):
assert (
self.bare.get_area_url(size)
== 'http://api.openstreetmap.org/api/0.5/map?bbox=%s,%s,%s,%s'
% results
)
def test_fetch_area_osm(self):
        # FIXME: The following test is skipped, because the Osm object
        # doesn't provide a reliable __repr__ method.
# assert Home.fetch_area_osm(3
pass
class TestWay:
def setup(self):
self.bare = Way(0, (0, 1, 2))
self.named = Way(
0, (0, 1, 2), True, 'jnrowe', utils.Timestamp(2008, 1, 25)
)
self.tagged = Way(0, (0, 1, 2), tags={'key': 'value'})
@mark.parametrize(
'node, result',
[
('bare', 'Way(0, [0, 1, 2], False, None, None, None)'),
(
'named',
"Way(0, [0, 1, 2], True, 'jnrowe', Timestamp(2008, 1, 25, 0, 0), "
'None)',
),
(
'tagged',
"Way(0, [0, 1, 2], False, None, None, {'key': 'value'})",
),
],
)
def test___repr__(self, node, result):
assert repr(getattr(self, node)) == result
@mark.parametrize(
'node, result',
[
('bare', 'Way 0 (nodes: 0, 1, 2)'),
(
'named',
'Way 0 (nodes: 0, 1, 2) [visible, user: jnrowe, timestamp: '
'2008-01-25T00:00:00+00:00]',
),
('tagged', 'Way 0 (nodes: 0, 1, 2) [key: value]'),
],
)
def test___str__(self, node, result):
assert str(getattr(self, node)) == result
def test___str___list(self):
nodes = [
Node(
0,
52.015749,
-0.221765,
True,
'jnrowe',
utils.Timestamp(2008, 1, 25, 12, 52, 11),
None,
),
Node(
1,
52.015761,
-0.221767,
True,
None,
utils.Timestamp(2008, 1, 25, 12, 53, 14),
{'created_by': 'hand', 'highway': 'crossing'},
),
Node(
2,
52.015754,
-0.221766,
True,
'jnrowe',
utils.Timestamp(2008, 1, 25, 12, 52, 30),
{'amenity': 'pub'},
),
]
assert self.tagged.__str__(nodes).splitlines() == [
'Way 0 [key: value]',
""" Node 0 (52°00′56″N, 000°13′18″W) [visible, user: """
'jnrowe, timestamp: 2008-01-25T12:52:11+00:00]',
""" Node 1 (52°00′56″N, 000°13′18″W) [visible, timestamp: """
'2008-01-25T12:53:14+00:00, created_by: hand, highway: crossing]',
""" Node 2 (52°00′56″N, 000°13′18″W) [visible, user: """
'jnrowe, timestamp: 2008-01-25T12:52:30+00:00, amenity: pub]',
]
@mark.parametrize(
'node, result',
[
(
'bare',
'<way id="0" visible="false">'
'<nd ref="0"/><nd ref="1"/><nd ref="2"/>'
'</way>',
),
(
'named',
'<way id="0" timestamp="2008-01-25T00:00:00+00:00" user="jnrowe" '
'visible="true">'
'<nd ref="0"/><nd ref="1"/><nd ref="2"/>'
'</way>',
),
(
'tagged',
'<way id="0" visible="false">'
'<tag k="key" v="value"/>'
'<nd ref="0"/><nd ref="1"/><nd ref="2"/>'
'</way>',
),
],
)
def test_toosm(self, node, result):
xml_str_compare(result, etree.tostring(getattr(self, node).toosm()))
class TestOsm:
def setup(self):
with open('tests/data/osm') as f:
self.region = Osm(f)
def test_import_locations(self):
assert [
str(x)
for x in sorted(
(x for x in self.region if isinstance(x, Node)),
key=attrgetter('ident'),
)
] == [
"""Node 0 (52°00′56″N, 000°13′18″W) [visible, user: jnrowe, """
'timestamp: 2008-01-25T12:52:11+00:00]',
"""Node 1 (52°00′56″N, 000°13′18″W) [visible, timestamp: """
'2008-01-25T12:53:00+00:00, created_by: hand, highway: crossing]',
"""Node 2 (52°00′56″N, 000°13′18″W) [visible, user: jnrowe, """
'timestamp: 2008-01-25T12:52:30+00:00, amenity: pub]',
]
def test_export_osm_file(self):
export = self.region.export_osm_file()
osm_xml = etree.parse('tests/data/osm')
for e1, e2 in zip(export.getiterator(), osm_xml.getiterator()):
xml_compare(e1, e2)
|
BeDjango/intef-openedx
|
refs/heads/master
|
lms/djangoapps/courseware/tests/test_field_overrides.py
|
36
|
"""
Tests for `field_overrides` module.
"""
import unittest
from nose.plugins.attrib import attr
from django.test.utils import override_settings
from xblock.field_data import DictFieldData
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase,
)
from ..field_overrides import (
disable_overrides,
FieldOverrideProvider,
OverrideFieldData,
resolve_dotted,
)
TESTUSER = "testuser"
@attr('shard_1')
@override_settings(FIELD_OVERRIDE_PROVIDERS=(
'courseware.tests.test_field_overrides.TestOverrideProvider',))
class OverrideFieldDataTests(ModuleStoreTestCase):
"""
Tests for `OverrideFieldData`.
"""
def setUp(self):
super(OverrideFieldDataTests, self).setUp()
self.course = CourseFactory.create(enable_ccx=True)
OverrideFieldData.provider_classes = None
def tearDown(self):
super(OverrideFieldDataTests, self).tearDown()
OverrideFieldData.provider_classes = None
def make_one(self):
"""
Factory method.
"""
return OverrideFieldData.wrap(TESTUSER, self.course, DictFieldData({
'foo': 'bar',
'bees': 'knees',
}))
def test_get(self):
data = self.make_one()
self.assertEqual(data.get('block', 'foo'), 'fu')
self.assertEqual(data.get('block', 'bees'), 'knees')
with disable_overrides():
self.assertEqual(data.get('block', 'foo'), 'bar')
def test_set(self):
data = self.make_one()
data.set('block', 'foo', 'yowza')
self.assertEqual(data.get('block', 'foo'), 'fu')
with disable_overrides():
self.assertEqual(data.get('block', 'foo'), 'yowza')
def test_delete(self):
data = self.make_one()
data.delete('block', 'foo')
self.assertEqual(data.get('block', 'foo'), 'fu')
with disable_overrides():
# Since field_data is responsible for attribute access, you'd
# expect it to raise AttributeError. In fact, it raises KeyError,
# so we check for that.
with self.assertRaises(KeyError):
data.get('block', 'foo')
def test_has(self):
data = self.make_one()
self.assertTrue(data.has('block', 'foo'))
self.assertTrue(data.has('block', 'bees'))
self.assertTrue(data.has('block', 'oh'))
with disable_overrides():
self.assertFalse(data.has('block', 'oh'))
def test_many(self):
data = self.make_one()
data.set_many('block', {'foo': 'baz', 'ah': 'ic'})
self.assertEqual(data.get('block', 'foo'), 'fu')
self.assertEqual(data.get('block', 'ah'), 'ic')
with disable_overrides():
self.assertEqual(data.get('block', 'foo'), 'baz')
@override_settings(FIELD_OVERRIDE_PROVIDERS=())
def test_no_overrides_configured(self):
data = self.make_one()
self.assertIsInstance(data, DictFieldData)
@attr('shard_1')
class ResolveDottedTests(unittest.TestCase):
"""
Tests for `resolve_dotted`.
"""
def test_bad_sub_import(self):
with self.assertRaises(ImportError):
resolve_dotted('courseware.tests.test_foo')
def test_bad_import(self):
with self.assertRaises(ImportError):
resolve_dotted('nosuchpackage')
def test_import_something_that_isnt_already_loaded(self):
self.assertEqual(
resolve_dotted('courseware.tests.animport.SOMENAME'),
'bar'
)
class TestOverrideProvider(FieldOverrideProvider):
"""
A concrete implementation of `FieldOverrideProvider` for testing.
"""
def get(self, block, name, default):
assert self.user is TESTUSER
assert block == 'block'
if name == 'foo':
return 'fu'
if name == 'oh':
return 'man'
return default
@classmethod
def enabled_for(cls, course):
return True
def inject_field_overrides(blocks, course, user):
"""
Apparently the test harness doesn't use LmsFieldStorage, and I'm
not sure if there's a way to poke the test harness to do so. So,
we'll just inject the override field storage in this brute force
manner.
"""
OverrideFieldData.provider_classes = None
for block in blocks:
block._field_data = OverrideFieldData.wrap( # pylint: disable=protected-access
user, course, block._field_data) # pylint: disable=protected-access
|
rohe/saml2test2
|
refs/heads/master
|
testtool/idp_test/__init__.py
|
22
|
__author__ = 'roland'
|
linktlh/Toontown-journey
|
refs/heads/master
|
toontown/effects/FireworkShowMixin.py
|
1
|
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from toontown.toonbase.ToontownGlobals import *
from toontown.toonbase import TTLocalizer
from toontown.parties import PartyGlobals
import Fireworks
import FireworkShows
from FireworkGlobals import skyTransitionDuration, preShowPauseDuration
from toontown.effects.FireworkShow import FireworkShow
FIREWORK_SHOW_LOCATION = {
ToontownCentral: (150, 0, 80, 90, 0, 0),
DonaldsDock: (150, 0, 50, 90, 0, 0),
MinniesMelodyland: (150, -25, 40, 90, 0, 0),
TheBrrrgh: (-200, -60, 50, 270, 0, 0),
DaisyGardens: (-80, -50, 60, 0, 0, 0),
OutdoorZone: (-450, -80, 140, 300, 0, 0),
GoofySpeedway: (60, -350, 80, 20, 0, 0),
DonaldsDreamland: (-160, 0, 80, 270, 0, 0),
PartyHood: (0, -400, 120, 0, 0, 0)
}
PRESHOW_DICT = {
JULY4_FIREWORKS: [
TTLocalizer.FireworksInstructions,
TTLocalizer.FireworksJuly4Beginning,
TTLocalizer.FireworksJuly4Ending,
['tt_summer', 'firework_music']
],
NEWYEARS_FIREWORKS: [
TTLocalizer.FireworksInstructions,
TTLocalizer.FireworksNewYearsEveBeginning,
TTLocalizer.FireworksNewYearsEveEnding,
['new_years_fireworks_music', 'tt_s_ara_gen_fireworks_auldLangSyne']
],
PartyGlobals.FireworkShows.Summer: [
TTLocalizer.FireworksActivityInstructions,
TTLocalizer.FireworksActivityBeginning,
TTLocalizer.FireworksActivityEnding,
['tt_party1', 'tt_party2']
],
COMBO_FIREWORKS: [
TTLocalizer.FireworksInstructions,
TTLocalizer.FireworksComboBeginning,
TTLocalizer.FireworksComboEnding,
['new_years_fireworks_music', 'tt_s_ara_gen_fireworks_auldLangSyne']
]
}
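# Each PRESHOW_DICT entry is [instruction message, beginning message, ending
# message, candidate song basenames]; preShow() below indexes the song list
# with songId to build the 'phase_4/audio/bgm/<song>.ogg' music path.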
POSTSHOW_DICT = {
JULY4_FIREWORKS: TTLocalizer.FireworksJuly4Ending,
NEWYEARS_FIREWORKS: TTLocalizer.FireworksNewYearsEveEnding,
PartyGlobals.FireworkShows.Summer: TTLocalizer.FireworksActivityEnding,
COMBO_FIREWORKS: TTLocalizer.FireworksComboEnding
}
class FireworkShowMixin:
notify = DirectNotifyGlobal.directNotify.newCategory('FireworkShowMixin')
def __init__(self, restorePlaygroundMusic = True, startDelay = 0.0):
self.currentShow = None
self.restorePlaygroundMusic = restorePlaygroundMusic
self.startDelay = startDelay
self.timestamp = None
self.fireworkShow = None
self.eventId = JULY4_FIREWORKS
self.accept('MusicEnabled', self.startMusic)
return
def disable(self):
if self.currentShow:
self.currentShow.pause()
self.currentShow = None
if base.config.GetBool('want-old-fireworks', False):
ivalMgr.finishIntervalsMatching('shootFirework*')
else:
self.destroyFireworkShow()
hood = self.getHood()
if hood is not None:
if hood.id == DonaldsDock:
hood.whiteFogColor = Vec4(0.8, 0.8, 0.8, 1)
self.restoreCameraLens()
if hood is not None:
if hasattr(hood, 'loader'):
self.getGeom().clearColorScale()
if hasattr(hood, 'sky'):
self.getSky().show()
self.getSky().clearColorScale()
if hasattr(base, 'localAvatar') and base.localAvatar:
base.localAvatar.clearColorScale()
base.setBackgroundColor(DefaultBackgroundColor)
self.ignoreAll()
def startMusic(self):
if self.timestamp:
self.getLoader().music.stop()
t = globalClockDelta.localElapsedTime(self.timestamp) - self.startDelay
base.playMusic(self.showMusic, 0, 1, 1, max(0, t))
def shootFirework(self, x, y, z, style, color1, color2):
amp = 5
Fireworks.shootFirework(style, x, y, z, color1, color2, amp)
def startShow(self, eventId, style, songId, timestamp, root = render):
t = globalClockDelta.localElapsedTime(timestamp) - self.startDelay
self.timestamp = timestamp
self.showMusic = None
self.eventId = eventId
        if base.config.GetBool('want-old-fireworks', False):
self.currentShow = self.getFireworkShowIval(eventId, style, songId, t)
if self.currentShow:
self.currentShow.start(t)
return
self.createFireworkShow()
if t > self.fireworkShow.getShowDuration():
return
preShow = self.preShow(eventId, songId, t)
postShow = self.postShow(eventId)
beginFireworkShow = Func(self.beginFireworkShow, max(0, t), root)
self.currentShow = Sequence(
preShow, beginFireworkShow, Wait(max(0, self.fireworkShow.getShowDuration() - max(0, t))), postShow
)
self.currentShow.start()
return
def preShow(self, eventId, songId, startT):
if eventId not in PRESHOW_DICT:
FireworkShowMixin.notify.warning('Invalid fireworks event ID: %d' % eventId)
return None
instructionMessage, startMessage, endMessage, songs = PRESHOW_DICT[eventId]
musicFile = 'phase_4/audio/bgm/%s.ogg' % songs[songId]
self.showMusic = loader.loadMusic(musicFile)
self.showMusic.setVolume(1)
def __lightDecorationOn__():
place = base.cr.playGame.getPlace()
if place is None:
return
if hasattr(place, 'halloweenLights'):
if not self.__checkStreetValidity():
return
else:
place.halloweenLights = base.cr.playGame.getPlace().loader.geom.findAllMatches('**/*light*')
place.halloweenLights.extend(base.cr.playGame.getPlace().loader.geom.findAllMatches('**/*lamp*'))
for light in place.halloweenLights:
light.setColorScaleOff(0)
elif not self.__checkHoodValidity():
return
else:
place.loader.hood.halloweenLights = base.cr.playGame.hood.loader.geom.findAllMatches('**/*light*')
place.loader.hood.halloweenLights.extend(base.cr.playGame.hood.loader.geom.findAllMatches('**/*lamp*'))
for light in base.cr.playGame.hood.halloweenLights:
light.setColorScaleOff(0)
if self.fireworkShow and not self.fireworkShow.isEmpty():
self.fireworkShow.setColorScaleOff(0)
return
if self.__checkHoodValidity() and hasattr(base.cr.playGame, 'hood') and base.cr.playGame.hood \
and hasattr(base.cr.playGame.hood, 'sky') and base.cr.playGame.hood.sky:
return Sequence(
Func(base.localAvatar.setSystemMessage, 0, startMessage),
Parallel(
LerpColorScaleInterval(base.cr.playGame.hood.sky, 2.5, Vec4(0.0, 0.0, 0.0, 1.0)),
LerpColorScaleInterval(base.cr.playGame.hood.loader.geom, 2.5, Vec4(0.25, 0.25, 0.35, 1)),
LerpColorScaleInterval(base.localAvatar, 2.5, Vec4(0.85, 0.85, 0.85, 1)), Func(__lightDecorationOn__)
),
Func(base.setBackgroundColor, Vec4(0, 0, 0, 1)),
Func(self.__checkDDFog), Func(base.camLens.setFar, 1000.0),
Func(base.cr.playGame.hood.sky.hide),
Func(base.localAvatar.setSystemMessage, 0, instructionMessage),
Func(self.getLoader().music.stop),
Wait(2.0),
Func(base.playMusic, self.showMusic, 0, 1, 0.8, max(0, startT))
)
return None
def restoreCameraLens(self):
hood = self.getHood()
if hood is not None:
if hood.id == GoofySpeedway or hood.id == OutdoorZone:
base.camLens.setFar(SpeedwayCameraFar)
else:
base.camLens.setFar(DefaultCameraFar)
def postShow(self, eventId):
endMessage = POSTSHOW_DICT.get(eventId)
if endMessage is None:
FireworkShowMixin.notify.warning('Invalid fireworks event ID: %d' % eventId)
return None
if self.__checkHoodValidity() and hasattr(base.cr.playGame.hood, 'sky') and base.cr.playGame.hood.sky:
postShow = Sequence(
Func(base.cr.playGame.hood.sky.show),
Parallel(
LerpColorScaleInterval(base.cr.playGame.hood.sky, 2.5, Vec4(1, 1, 1, 1)),
LerpColorScaleInterval(base.cr.playGame.hood.loader.geom, 2.5, Vec4(1, 1, 1, 1)),
LerpColorScaleInterval(base.localAvatar, 2.5, Vec4(1, 1, 1, 1))
),
Func(self.__restoreDDFog),
Func(self.restoreCameraLens),
Func(base.setBackgroundColor, DefaultBackgroundColor),
Func(self.showMusic.stop),
Func(base.localAvatar.setSystemMessage, 0, endMessage)
)
if self.restorePlaygroundMusic:
postShow.append(Wait(2.0))
postShow.append(Func(base.playMusic, self.getLoader().music, 1, 1, 0.8))
return postShow
def createFireworkShow(self):
if not self.fireworkShow:
self.fireworkShow = FireworkShow(self.eventId)
def destroyFireworkShow(self):
if self.fireworkShow:
self.fireworkShow.cleanupShow()
self.fireworkShow = None
return
def beginFireworkShow(self, timeStamp, root):
if self.fireworkShow and not self.fireworkShow.isPlaying():
self.fireworkShow.begin(timeStamp)
self.fireworkShow.reparentTo(root)
hoodId = self.getHood().id
if hoodId in FIREWORK_SHOW_LOCATION:
self.fireworkShow.setPosHpr(*FIREWORK_SHOW_LOCATION[hoodId])
if hoodId == PartyHood:
self.fireworkShow.setScale(1.8)
def getFireworkShowIval(self, eventId, index, songId, startT):
show = FireworkShows.getShow(eventId, index)
if show is None:
FireworkShowMixin.notify.warning('could not find firework show: index: %s' % index)
return
preShow = self.preShow(eventId, songId, startT)
mainShow = Sequence()
currentT = skyTransitionDuration + preShowPauseDuration
for effect in show:
waitTime, style, colorIndex1, colorIndex2, amp, x, y, z = effect
if waitTime > 0:
currentT += waitTime
mainShow.append(Wait(waitTime))
if currentT >= startT:
mainShow.append(Func(Fireworks.shootFirework, style, x, y, z, colorIndex1, colorIndex2, amp))
postShow = self.postShow(eventId)
return Sequence(preShow, mainShow, postShow)
def clearMyColorScales(self):
if self.getGeom() and not self.getGeom().isEmpty():
self.getGeom().clearColorScale()
if self.getSky() and not self.getSky().isEmpty():
self.getSky().clearColorScale()
def getLoader(self):
if base.cr.playGame.hood is not None:
return base.cr.playGame.hood.loader
return
def getHood(self):
if base.cr.playGame.hood is not None:
return base.cr.playGame.hood
return
def getGeom(self):
loader = self.getLoader()
if loader:
return loader.geom
return None
def getSky(self):
hood = self.getHood()
if hood:
return hood.sky
return None
def __checkDDFog(self):
if self.getHood().id == DonaldsDock:
self.getHood().whiteFogColor = Vec4(0.2, 0.2, 0.2, 1)
if hasattr(base.cr.playGame.getPlace(), 'cameraSubmerged'):
if not base.cr.playGame.getPlace().cameraSubmerged:
self.getHood().setWhiteFog()
def __restoreDDFog(self):
if self.getHood().id == DonaldsDock:
self.getHood().whiteFogColor = Vec4(0.8, 0.8, 0.8, 1)
if hasattr(base.cr.playGame.getPlace(), 'cameraSubmerged'):
if not base.cr.playGame.getPlace().cameraSubmerged:
self.getHood().setWhiteFog()
def __checkStreetValidity(self):
return hasattr(base.cr.playGame, 'getPlace') and base.cr.playGame.getPlace()\
and hasattr(base.cr.playGame.getPlace(), 'loader') and base.cr.playGame.getPlace().loader\
and hasattr(base.cr.playGame.getPlace().loader, 'geom') and base.cr.playGame.getPlace().loader.geom
def __checkHoodValidity(self):
return hasattr(base.cr.playGame, 'hood') and base.cr.playGame.hood\
and hasattr(base.cr.playGame.hood, 'loader') and base.cr.playGame.hood.loader\
and hasattr(base.cr.playGame.hood.loader, 'geom') and base.cr.playGame.hood.loader.geom
|
arraypan/Food-Stickers
|
refs/heads/master
|
FoodStickersMessages/lib/python3.5/site-packages/wheel/tool/__init__.py
|
232
|
"""
Wheel command-line utility.
"""
import os
import hashlib
import sys
import json
import wheel.paths
from glob import iglob
from .. import signatures
from ..util import (urlsafe_b64decode, urlsafe_b64encode, native, binary,
matches_requirement)
from ..install import WheelFile
def require_pkgresources(name):
try:
import pkg_resources
except ImportError:
raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
import argparse
class WheelError(Exception): pass
# For testability
def get_keyring():
try:
from ..signatures import keys
import keyring
assert keyring.get_keyring().priority
except (ImportError, AssertionError):
raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")
return keys.WheelKeys, keyring
def keygen(get_keyring=get_keyring):
"""Generate a public/private key pair."""
WheelKeys, keyring = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wk = WheelKeys().load()
keypair = ed25519ll.crypto_sign_keypair()
vk = native(urlsafe_b64encode(keypair.vk))
sk = native(urlsafe_b64encode(keypair.sk))
kr = keyring.get_keyring()
kr.set_password("wheel", vk, sk)
sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(vk))
sys.stdout.write("in {0!r}\n".format(kr))
sk2 = kr.get_password('wheel', vk)
if sk2 != sk:
raise WheelError("Keyring is broken. Could not retrieve secret key.")
sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk))
wk.add_signer('+', vk)
wk.trust('+', vk)
wk.save()
def sign(wheelfile, replace=False, get_keyring=get_keyring):
"""Sign a wheel"""
WheelKeys, keyring = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wf = WheelFile(wheelfile, append=True)
wk = WheelKeys().load()
name = wf.parsed_filename.group('name')
sign_with = wk.signers(name)[0]
sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))
vk = sign_with[1]
kr = keyring.get_keyring()
sk = kr.get_password('wheel', vk)
keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
urlsafe_b64decode(binary(sk)))
record_name = wf.distinfo_name + '/RECORD'
sig_name = wf.distinfo_name + '/RECORD.jws'
if sig_name in wf.zipfile.namelist():
raise WheelError("Wheel is already signed.")
record_data = wf.zipfile.read(record_name)
payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
sig = signatures.sign(payload, keypair)
wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
wf.zipfile.close()
def unsign(wheelfile):
"""
Remove RECORD.jws from a wheel by truncating the zip file.
RECORD.jws must be at the end of the archive. The zip file must be an
ordinary archive, with the compressed files and the directory in the same
order, and without any non-zip content after the truncation point.
"""
import wheel.install
vzf = wheel.install.VerifyingZipFile(wheelfile, "a")
info = vzf.infolist()
if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):
raise WheelError("RECORD.jws not found at end of archive.")
vzf.pop()
vzf.close()
def verify(wheelfile):
"""Verify a wheel.
The signature will be verified for internal consistency ONLY and printed.
Wheel's own unpack/install commands verify the manifest against the
signature and file contents.
"""
wf = WheelFile(wheelfile)
sig_name = wf.distinfo_name + '/RECORD.jws'
sig = json.loads(native(wf.zipfile.open(sig_name).read()))
verified = signatures.verify(sig)
sys.stderr.write("Signatures are internally consistent.\n")
sys.stdout.write(json.dumps(verified, indent=2))
sys.stdout.write('\n')
def unpack(wheelfile, dest='.'):
"""Unpack a wheel.
Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
is the package name and {ver} its version.
:param wheelfile: The path to the wheel.
:param dest: Destination directory (default to current directory).
"""
wf = WheelFile(wheelfile)
namever = wf.parsed_filename.group('namever')
destination = os.path.join(dest, namever)
sys.stderr.write("Unpacking to: %s\n" % (destination))
wf.zipfile.extractall(destination)
wf.zipfile.close()
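# Usage sketch (the wheel filename is a placeholder):
#
#   unpack('mypkg-1.0-py2.py3-none-any.whl', dest='/tmp')
#   # extracts the archive contents to /tmp/mypkg-1.0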
def install(requirements, requirements_file=None,
wheel_dirs=None, force=False, list_files=False,
dry_run=False):
"""Install wheels.
:param requirements: A list of requirements or wheel files to install.
:param requirements_file: A file containing requirements to install.
:param wheel_dirs: A list of directories to search for wheels.
:param force: Install a wheel file even if it is not compatible.
:param list_files: Only list the files to install, don't install them.
:param dry_run: Do everything but the actual install.
"""
# If no wheel directories specified, use the WHEELPATH environment
# variable, or the current directory if that is not set.
if not wheel_dirs:
wheelpath = os.getenv("WHEELPATH")
if wheelpath:
wheel_dirs = wheelpath.split(os.pathsep)
else:
wheel_dirs = [ os.path.curdir ]
# Get a list of all valid wheels in wheel_dirs
all_wheels = []
for d in wheel_dirs:
for w in os.listdir(d):
if w.endswith('.whl'):
wf = WheelFile(os.path.join(d, w))
if wf.compatible:
all_wheels.append(wf)
# If there is a requirements file, add it to the list of requirements
if requirements_file:
# If the file doesn't exist, search for it in wheel_dirs
# This allows standard requirements files to be stored with the
# wheels.
if not os.path.exists(requirements_file):
for d in wheel_dirs:
name = os.path.join(d, requirements_file)
if os.path.exists(name):
requirements_file = name
break
with open(requirements_file) as fd:
requirements.extend(fd)
to_install = []
for req in requirements:
if req.endswith('.whl'):
# Explicitly specified wheel filename
if os.path.exists(req):
wf = WheelFile(req)
if wf.compatible or force:
to_install.append(wf)
else:
msg = ("{0} is not compatible with this Python. "
"--force to install anyway.".format(req))
raise WheelError(msg)
else:
# We could search on wheel_dirs, but it's probably OK to
# assume the user has made an error.
raise WheelError("No such wheel file: {}".format(req))
continue
# We have a requirement spec
# If we don't have pkg_resources, this will raise an exception
matches = matches_requirement(req, all_wheels)
if not matches:
raise WheelError("No match for requirement {}".format(req))
to_install.append(max(matches))
# We now have a list of wheels to install
if list_files:
sys.stdout.write("Installing:\n")
if dry_run:
return
for wf in to_install:
if list_files:
sys.stdout.write(" {0}\n".format(wf.filename))
continue
wf.install(force=force)
wf.zipfile.close()
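# Hedged example: list which wheels in ./wheelhouse would satisfy the
# placeholder requirement 'mypkg', without installing anything:
#
#   install(['mypkg'], wheel_dirs=['wheelhouse'], list_files=True)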
def install_scripts(distributions):
"""
Regenerate the entry_points console_scripts for the named distribution.
"""
try:
from setuptools.command import easy_install
import pkg_resources
except ImportError:
raise RuntimeError("'wheel install_scripts' needs setuptools.")
for dist in distributions:
pkg_resources_dist = pkg_resources.get_distribution(dist)
install = wheel.paths.get_install_command(dist)
command = easy_install.easy_install(install.distribution)
command.args = ['wheel'] # dummy argument
command.finalize_options()
command.install_egg_scripts(pkg_resources_dist)
def convert(installers, dest_dir, verbose):
require_pkgresources('wheel convert')
# Only support wheel convert if pkg_resources is present
from ..wininst2wheel import bdist_wininst2wheel
from ..egg2wheel import egg2wheel
for pat in installers:
for installer in iglob(pat):
if os.path.splitext(installer)[1] == '.egg':
conv = egg2wheel
else:
conv = bdist_wininst2wheel
if verbose:
sys.stdout.write("{0}... ".format(installer))
sys.stdout.flush()
conv(installer, dest_dir)
if verbose:
sys.stdout.write("OK\n")
def parser():
p = argparse.ArgumentParser()
s = p.add_subparsers(help="commands")
def keygen_f(args):
keygen()
keygen_parser = s.add_parser('keygen', help='Generate signing key')
keygen_parser.set_defaults(func=keygen_f)
def sign_f(args):
sign(args.wheelfile)
sign_parser = s.add_parser('sign', help='Sign wheel')
sign_parser.add_argument('wheelfile', help='Wheel file')
sign_parser.set_defaults(func=sign_f)
def unsign_f(args):
unsign(args.wheelfile)
unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
unsign_parser.add_argument('wheelfile', help='Wheel file')
unsign_parser.set_defaults(func=unsign_f)
def verify_f(args):
verify(args.wheelfile)
verify_parser = s.add_parser('verify', help=verify.__doc__)
verify_parser.add_argument('wheelfile', help='Wheel file')
verify_parser.set_defaults(func=verify_f)
def unpack_f(args):
unpack(args.wheelfile, args.dest)
unpack_parser = s.add_parser('unpack', help='Unpack wheel')
unpack_parser.add_argument('--dest', '-d', help='Destination directory',
default='.')
unpack_parser.add_argument('wheelfile', help='Wheel file')
unpack_parser.set_defaults(func=unpack_f)
def install_f(args):
install(args.requirements, args.requirements_file,
args.wheel_dirs, args.force, args.list_files)
install_parser = s.add_parser('install', help='Install wheels')
install_parser.add_argument('requirements', nargs='*',
help='Requirements to install.')
install_parser.add_argument('--force', default=False,
action='store_true',
help='Install incompatible wheel files.')
install_parser.add_argument('--wheel-dir', '-d', action='append',
dest='wheel_dirs',
help='Directories containing wheels.')
install_parser.add_argument('--requirements-file', '-r',
help="A file containing requirements to "
"install.")
install_parser.add_argument('--list', '-l', default=False,
dest='list_files',
action='store_true',
help="List wheels which would be installed, "
"but don't actually install anything.")
install_parser.set_defaults(func=install_f)
def install_scripts_f(args):
install_scripts(args.distributions)
install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
install_scripts_parser.add_argument('distributions', nargs='*',
help='Regenerate console_scripts for these distributions')
install_scripts_parser.set_defaults(func=install_scripts_f)
def convert_f(args):
convert(args.installers, args.dest_dir, args.verbose)
convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store wheels (default %(default)s)")
convert_parser.add_argument('--verbose', '-v', action='store_true')
convert_parser.set_defaults(func=convert_f)
def version_f(args):
from .. import __version__
sys.stdout.write("wheel %s\n" % __version__)
version_parser = s.add_parser('version', help='Print version and exit')
version_parser.set_defaults(func=version_f)
def help_f(args):
p.print_help()
help_parser = s.add_parser('help', help='Show this help')
help_parser.set_defaults(func=help_f)
return p
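# Illustrative sketch (not part of the original module): every subparser
# stores its handler with set_defaults(func=...), so main() below can
# dispatch generically on whichever subcommand was chosen.
def _example_dispatch():
    p = parser()
    args = p.parse_args(['version'])
    args.func(args)  # runs version_f, writing "wheel <version>" to stdout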
def main():
p = parser()
args = p.parse_args()
if not hasattr(args, 'func'):
p.print_help()
else:
# XXX on Python 3.3 we get 'args has no func' rather than short help.
try:
args.func(args)
return 0
except WheelError as e:
sys.stderr.write(e.message + "\n")
return 1
|
Qalthos/ansible
|
refs/heads/devel
|
test/units/modules/net_tools/nios/test_nios_network.py
|
27
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.net_tools.nios import api
from ansible.modules.net_tools.nios import nios_network
from units.compat.mock import patch, MagicMock, Mock
from units.modules.utils import set_module_args
from .test_nios_module import TestNiosModule, load_fixture
class TestNiosNetworkModule(TestNiosModule):
module = nios_network
def setUp(self):
super(TestNiosNetworkModule, self).setUp()
self.module = MagicMock(name='ansible.modules.net_tools.nios.nios_network.WapiModule')
self.module.check_mode = False
self.module.params = {'provider': None}
self.mock_wapi = patch('ansible.modules.net_tools.nios.nios_network.WapiModule')
self.exec_command = self.mock_wapi.start()
        self.mock_wapi_run = patch('ansible.modules.net_tools.nios.nios_network.WapiModule.run')
        self.load_config = self.mock_wapi_run.start()
def tearDown(self):
super(TestNiosNetworkModule, self).tearDown()
self.mock_wapi.stop()
self.mock_wapi_run.stop()
def load_fixtures(self, commands=None):
self.exec_command.return_value = (0, load_fixture('nios_result.txt').strip(), None)
self.load_config.return_value = dict(diff=None, session='session')
def _get_wapi(self, test_object):
wapi = api.WapiModule(self.module)
wapi.get_object = Mock(name='get_object', return_value=test_object)
wapi.create_object = Mock(name='create_object')
wapi.update_object = Mock(name='update_object')
wapi.delete_object = Mock(name='delete_object')
return wapi
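    # Note: _get_wapi returns a real api.WapiModule wired to MagicMock CRUD
    # methods, so each test below can assert on exactly one create_object /
    # update_object / delete_object call without touching a live NIOS server.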
def test_nios_network_ipv4_create(self):
self.module.params = {'provider': None, 'state': 'present', 'network': '192.168.10.0/24',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'network': '192.168.10.0/24'})
def test_nios_network_ipv4_dhcp_update(self):
self.module.params = {'provider': None, 'state': 'present', 'network': '192.168.10.0/24',
'comment': 'updated comment', 'extattrs': None}
test_object = [
{
"comment": "test comment",
"_ref": "network/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
"network": "192.168.10.0/24",
"extattrs": {'options': {'name': 'test', 'value': 'ansible.com'}}
}
]
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
def test_nios_network_ipv6_dhcp_update(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6network': 'fe80::/64',
'comment': 'updated comment', 'extattrs': None}
test_object = [
{
"comment": "test comment",
"_ref": "ipv6network/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
"ipv6network": "fe80::/64",
"extattrs": {'options': {'name': 'test', 'value': 'ansible.com'}}
}
]
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
def test_nios_network_ipv4_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'network': '192.168.10.0/24',
'comment': None, 'extattrs': None}
ref = "network/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"network": "192.168.10.0/24",
"extattrs": {'Site': {'value': 'test'}}
}]
test_spec = {
"network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_network_ipv6_create(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6network': 'fe80::/64',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'ipv6network': 'fe80::/64'})
def test_nios_network_ipv6_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'ipv6network': 'fe80::/64',
'comment': None, 'extattrs': None}
ref = "ipv6network/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"ipv6network": "fe80::/64",
"extattrs": {'Site': {'value': 'test'}}
}]
test_spec = {
"ipv6network": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_networkcontainer_ipv4_create(self):
self.module.params = {'provider': None, 'state': 'present', 'networkcontainer': '192.168.10.0/24',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'networkcontainer': '192.168.10.0/24'})
def test_nios_networkcontainer_ipv4_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'networkcontainer': '192.168.10.0/24',
'comment': None, 'extattrs': None}
ref = "networkcontainer/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
test_object = [{
"comment": "test comment",
"_ref": ref,
"networkcontainer": "192.168.10.0/24"
}]
test_spec = {
"networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
def test_nios_networkcontainer_ipv6_create(self):
self.module.params = {'provider': None, 'state': 'present', 'ipv6networkcontainer': 'fe80::/64',
'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"ipv6networkcontainer": {"ib_req": True},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'ipv6networkcontainer': 'fe80::/64'})
|
klusark/android_external_chromium_org
|
refs/heads/cm-11.0
|
tools/json_to_struct/element_generator.py
|
115
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import struct_generator
def _JSONToCString16(json_string_literal):
"""Converts a JSON string literal to a C++ UTF-16 string literal. This is
done by converting \\u#### to \\x####.
"""
c_string_literal = json_string_literal
escape_index = c_string_literal.find('\\')
  # json.dumps output always starts with '"', so a backslash can never sit
  # at index 0; find() returning -1 (escape not found) also ends the loop.
  while escape_index > 0:
if c_string_literal[escape_index + 1] == 'u':
# We close the C string literal after the 4 hex digits and reopen it right
# after, otherwise the Windows compiler will sometimes try to get more
# than 4 characters in the hex string.
c_string_literal = (c_string_literal[0:escape_index + 1] + 'x' +
c_string_literal[escape_index + 2:escape_index + 6] + '" L"' +
c_string_literal[escape_index + 6:])
escape_index = c_string_literal.find('\\', escape_index + 6)
return c_string_literal
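# Illustrative example (not part of the generator): a \u#### escape becomes
# \x####, and the literal is closed and reopened right after the four hex
# digits so MSVC cannot read the following characters as more hex.
def _example_json_to_cstring16():
  assert (_JSONToCString16('"snow: \\u2603man"') ==
          '"snow: \\x2603" L"man"')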
def _GenerateString(content, lines):
"""Generates an UTF-8 string to be included in a static structure initializer.
If content is not specified, uses NULL.
"""
if content is None:
lines.append(' NULL,')
else:
# json.dumps quotes the string and escape characters as required.
lines.append(' %s,' % json.dumps(content))
def _GenerateString16(content, lines):
"""Generates an UTF-16 string to be included in a static structure
initializer. If content is not specified, uses NULL.
"""
if content is None:
lines.append(' NULL,')
else:
# json.dumps quotes the string and escape characters as required.
lines.append(' L%s,' % _JSONToCString16(json.dumps(content)))
def _GenerateArray(element_name, field_info, content, lines):
"""Generates an array to be included in a static structure initializer. If
content is not specified, uses NULL. The array is assigned to a temporary
variable which is initialized before the structure.
"""
if content is None:
lines.append(' NULL,')
lines.append(' 0,') # Size of the array.
return
  # Create a new array variable and use it in the structure initializer.
  # This scheme does not support nested arrays; a clash-detection and
  # renaming mechanism would be needed to lift that restriction.
  var = 'array_%s_%s' % (element_name, field_info['field'])
lines.append(' %s,' % var)
lines.append(' %s,' % len(content)) # Size of the array.
# Generate the array content.
array_lines = []
  field_info['contents']['field'] = var
array_lines.append(struct_generator.GenerateField(
field_info['contents']) + '[] = {')
for subcontent in content:
GenerateFieldContent(element_name, field_info['contents'], subcontent,
array_lines)
array_lines.append('};')
# Prepend the generated array so it is initialized before the structure.
lines.reverse()
array_lines.reverse()
lines.extend(array_lines)
lines.reverse()
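# Illustrative sketch (not part of the generator): the reverse/extend/reverse
# dance above is an in-place prepend, equivalent to lines[:0] = array_lines.
def _example_prepend():
  lines = ['const Foo kFoo = {']
  array_lines = ['const int array_x[] = {', '  1,', '};']
  lines.reverse()
  array_lines.reverse()
  lines.extend(array_lines)
  lines.reverse()
  assert lines == ['const int array_x[] = {', '  1,', '};',
                   'const Foo kFoo = {']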
def GenerateFieldContent(element_name, field_info, content, lines):
"""Generate the content of a field to be included in the static structure
initializer. If the field's content is not specified, uses the default value
if one exists.
"""
if content is None:
content = field_info.get('default', None)
  field_type = field_info['type']
  if field_type == 'int' or field_type == 'enum':
    lines.append(' %s,' % content)
  elif field_type == 'string':
    _GenerateString(content, lines)
  elif field_type == 'string16':
    _GenerateString16(content, lines)
  elif field_type == 'array':
    _GenerateArray(element_name, field_info, content, lines)
  else:
    raise RuntimeError('Unknown field type "%s"' % field_type)
def GenerateElement(type_name, schema, element_name, element):
"""Generate the static structure initializer for one element.
"""
  lines = []
  lines.append('const %s %s = {' % (type_name, element_name))
for field_info in schema:
content = element.get(field_info['field'], None)
if (content == None and not field_info.get('optional', False)):
raise RuntimeError('Mandatory field "%s" omitted in element "%s".' %
(field_info['field'], element_name))
GenerateFieldContent(element_name, field_info, content, lines)
lines.append('};')
return '\n'.join(lines)
def GenerateElements(type_name, schema, description):
"""Generate the static structure initializer for all the elements in the
description['elements'] dictionary, as well as for any variables in
description['int_variables'].
"""
  result = []
for var_name, value in description.get('int_variables', {}).items():
result.append('const int %s = %s;' % (var_name, value))
result.append('')
for element_name, element in description.get('elements', {}).items():
result.append(GenerateElement(type_name, schema, element_name, element))
result.append('')
return '\n'.join(result)
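# Illustrative usage (hypothetical schema and element, not from Chromium):
def _example_generate_element():
  schema = [{'field': 'id', 'type': 'int'},
            {'field': 'name', 'type': 'string', 'optional': True}]
  out = GenerateElement('MyType', schema, 'kExample', {'id': 7})
  assert out.splitlines()[0] == 'const MyType kExample = {'
  assert out.splitlines()[-1] == '};'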
|
aparo/django-nonrel
|
refs/heads/master
|
tests/regressiontests/admin_validation/models.py
|
11
|
"""
Tests of ModelAdmin validation logic.
"""
from django.db import models
class Album(models.Model):
title = models.CharField(max_length=150)
class Song(models.Model):
title = models.CharField(max_length=150)
album = models.ForeignKey(Album)
original_release = models.DateField(editable=False)
class Meta:
ordering = ('title',)
def __unicode__(self):
return self.title
def readonly_method_on_model(self):
# does nothing
pass
class TwoAlbumFKAndAnE(models.Model):
album1 = models.ForeignKey(Album, related_name="album1_set")
album2 = models.ForeignKey(Album, related_name="album2_set")
e = models.CharField(max_length=1)
class Author(models.Model):
name = models.CharField(max_length=100)
class Book(models.Model):
name = models.CharField(max_length=100)
subtitle = models.CharField(max_length=100)
price = models.FloatField()
authors = models.ManyToManyField(Author, through='AuthorsBooks')
class AuthorsBooks(models.Model):
author = models.ForeignKey(Author)
book = models.ForeignKey(Book)
__test__ = {'API_TESTS':"""
>>> from django import forms
>>> from django.contrib import admin
>>> from django.contrib.admin.validation import validate, validate_inline
# Regression test for #8027: custom ModelForms with fields/fieldsets
>>> class SongForm(forms.ModelForm):
... pass
>>> class ValidFields(admin.ModelAdmin):
... form = SongForm
... fields = ['title']
>>> class InvalidFields(admin.ModelAdmin):
... form = SongForm
... fields = ['spam']
>>> validate(ValidFields, Song)
>>> validate(InvalidFields, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: 'InvalidFields.fields' refers to field 'spam' that is missing from the form.
# Tests for basic validation of 'exclude' option values (#12689)
>>> class ExcludedFields1(admin.ModelAdmin):
... exclude = ('foo')
>>> validate(ExcludedFields1, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'ExcludedFields1.exclude' must be a list or tuple.
>>> class ExcludedFields2(admin.ModelAdmin):
... exclude = ('name', 'name')
>>> validate(ExcludedFields2, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: There are duplicate field(s) in ExcludedFields2.exclude
>>> class ExcludedFieldsInline(admin.TabularInline):
... model = Song
... exclude = ('foo')
>>> class ExcludedFieldsAlbumAdmin(admin.ModelAdmin):
... model = Album
... inlines = [ExcludedFieldsInline]
>>> validate(ExcludedFieldsAlbumAdmin, Album)
Traceback (most recent call last):
...
ImproperlyConfigured: 'ExcludedFieldsInline.exclude' must be a list or tuple.
# Regression test for #9932 - exclude in InlineModelAdmin
# should not contain the ForeignKey field used in ModelAdmin.model
>>> class SongInline(admin.StackedInline):
... model = Song
... exclude = ['album']
>>> class AlbumAdmin(admin.ModelAdmin):
... model = Album
... inlines = [SongInline]
>>> validate(AlbumAdmin, Album)
Traceback (most recent call last):
...
ImproperlyConfigured: SongInline cannot exclude the field 'album' - this is the foreign key to the parent model Album.
# Regression test for #11709 - when testing for fk excluding (when exclude is
# given) make sure fk_name is honored or things blow up when there is more
# than one fk to the parent model.
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
... exclude = ("e",)
... fk_name = "album1"
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
# Ensure inlines validate that they can be used correctly.
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
Traceback (most recent call last):
...
Exception: <class 'regressiontests.admin_validation.models.TwoAlbumFKAndAnE'> has more than 1 ForeignKey to <class 'regressiontests.admin_validation.models.Album'>
>>> class TwoAlbumFKAndAnEInline(admin.TabularInline):
... model = TwoAlbumFKAndAnE
... fk_name = "album1"
>>> validate_inline(TwoAlbumFKAndAnEInline, None, Album)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title",)
>>> validate(SongAdmin, Song)
>>> def my_function(obj):
... # does nothing
... pass
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = (my_function,)
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("readonly_method_on_modeladmin",)
...
... def readonly_method_on_modeladmin(self, obj):
... # does nothing
... pass
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("readonly_method_on_model",)
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title", "nonexistant")
>>> validate(SongAdmin, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: SongAdmin.readonly_fields[1], 'nonexistent' is not a callable or an attribute of 'SongAdmin' or found in the model 'Song'.
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = ("title", "awesome_song")
... fields = ("album", "title", "awesome_song")
>>> validate(SongAdmin, Song)
Traceback (most recent call last):
...
ImproperlyConfigured: SongAdmin.readonly_fields[1], 'awesome_song' is not a callable or an attribute of 'SongAdmin' or found in the model 'Song'.
>>> class SongAdmin(SongAdmin):
... def awesome_song(self, instance):
... if instance.title == "Born to Run":
... return "Best Ever!"
... return "Status unknown."
>>> validate(SongAdmin, Song)
>>> class SongAdmin(admin.ModelAdmin):
... readonly_fields = (lambda obj: "test",)
>>> validate(SongAdmin, Song)
# Regression test for #12203/#12237 - Fail more gracefully when a M2M field that
# specifies the 'through' option is included in the 'fields' or the 'fieldsets'
# ModelAdmin options.
>>> class BookAdmin(admin.ModelAdmin):
... fields = ['authors']
>>> validate(BookAdmin, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'BookAdmin.fields' can't include the ManyToManyField field 'authors' because 'authors' manually specifies a 'through' model.
>>> class FieldsetBookAdmin(admin.ModelAdmin):
... fieldsets = (
... ('Header 1', {'fields': ('name',)}),
... ('Header 2', {'fields': ('authors',)}),
... )
>>> validate(FieldsetBookAdmin, Book)
Traceback (most recent call last):
...
ImproperlyConfigured: 'FieldsetBookAdmin.fieldsets[1][1]['fields']' can't include the ManyToManyField field 'authors' because 'authors' manually specifies a 'through' model.
>>> class NestedFieldsetAdmin(admin.ModelAdmin):
... fieldsets = (
... ('Main', {'fields': ('price', ('name', 'subtitle'))}),
... )
>>> validate(NestedFieldsetAdmin, Book)
# Regression test for #12209 -- If the explicitly provided through model
# is specified as a string, the admin should still be able to use
# Model.m2m_field.through
>>> class AuthorsInline(admin.TabularInline):
... model = Book.authors.through
>>> class BookAdmin(admin.ModelAdmin):
... inlines = [AuthorsInline]
# Before the fix for #12209, this validation failed when the through model
# was still a string (i.e. not yet resolved to a model class).
>>> validate(BookAdmin, Book)
# Regression for ensuring ModelAdmin.fields can contain non-model fields
# that broke with r11737
>>> class SongForm(forms.ModelForm):
... extra_data = forms.CharField()
... class Meta:
... model = Song
>>> class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
... form = SongForm
... fields = ['title', 'extra_data']
>>> validate(FieldsOnFormOnlyAdmin, Song)
"""}
|
miguelpalacio/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/web/test/test_newclient.py
|
49
|
# Copyright (c) 2009-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web._newclient}.
"""
__metaclass__ = type
from zope.interface import implements
from zope.interface.verify import verifyObject
from twisted.python import log
from twisted.python.failure import Failure
from twisted.internet.interfaces import IConsumer, IPushProducer
from twisted.internet.error import ConnectionDone
from twisted.internet.defer import Deferred, succeed, fail
from twisted.internet.protocol import Protocol
from twisted.trial.unittest import TestCase
from twisted.test.proto_helpers import StringTransport, AccumulatingProtocol
from twisted.web._newclient import UNKNOWN_LENGTH, STATUS, HEADER, BODY, DONE
from twisted.web._newclient import Request, Response, HTTPParser, HTTPClientParser
from twisted.web._newclient import BadResponseVersion, ParseError, HTTP11ClientProtocol
from twisted.web._newclient import ChunkedEncoder, RequestGenerationFailed
from twisted.web._newclient import RequestTransmissionFailed, ResponseFailed
from twisted.web._newclient import WrongBodyLength, RequestNotSent
from twisted.web._newclient import ConnectionAborted
from twisted.web._newclient import BadHeaders, ResponseDone, PotentialDataLoss, ExcessWrite
from twisted.web._newclient import TransportProxyProducer, LengthEnforcingConsumer, makeStatefulDispatcher
from twisted.web.http_headers import Headers
from twisted.web.http import _DataLoss
from twisted.web.iweb import IBodyProducer
class ArbitraryException(Exception):
"""
A unique, arbitrary exception type which L{twisted.web._newclient} knows
nothing about.
"""
class AnotherArbitraryException(Exception):
"""
Similar to L{ArbitraryException} but with a different identity.
"""
# A re-usable Headers instance for tests which don't really care what headers
# they're sending.
_boringHeaders = Headers({'host': ['example.com']})
def assertWrapperExceptionTypes(self, deferred, mainType, reasonTypes):
"""
Assert that the given L{Deferred} fails with the exception given by
C{mainType} and that the exceptions wrapped by the instance of C{mainType}
it fails with match the list of exception types given by C{reasonTypes}.
This is a helper for testing failures of exceptions which subclass
L{_newclient._WrapperException}.
@param self: A L{TestCase} instance which will be used to make the
assertions.
@param deferred: The L{Deferred} which is expected to fail with
C{mainType}.
@param mainType: A L{_newclient._WrapperException} subclass which will be
trapped on C{deferred}.
@param reasonTypes: A sequence of exception types which will be trapped on
the resulting L{mainType} exception instance's C{reasons} sequence.
    @return: A L{Deferred} which fires with the C{mainType} instance
        C{deferred} fails with, or which itself fails if the expected
        failure does not occur.
"""
    def cbFailed(err):
        for reason, reasonType in zip(err.reasons, reasonTypes):
            reason.trap(reasonType)
self.assertEqual(len(err.reasons), len(reasonTypes),
"len(%s) != len(%s)" % (err.reasons, reasonTypes))
return err
d = self.assertFailure(deferred, mainType)
d.addCallback(cbFailed)
return d
def assertResponseFailed(self, deferred, reasonTypes):
"""
A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
of L{ResponseFailed}.
"""
return assertWrapperExceptionTypes(self, deferred, ResponseFailed, reasonTypes)
def assertRequestGenerationFailed(self, deferred, reasonTypes):
"""
A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
of L{RequestGenerationFailed}.
"""
return assertWrapperExceptionTypes(self, deferred, RequestGenerationFailed, reasonTypes)
def assertRequestTransmissionFailed(self, deferred, reasonTypes):
"""
A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
of L{RequestTransmissionFailed}.
"""
return assertWrapperExceptionTypes(self, deferred, RequestTransmissionFailed, reasonTypes)
def justTransportResponse(transport):
"""
Helper function for creating a Response which uses the given transport.
All of the other parameters to L{Response.__init__} are filled with
arbitrary values. Only use this method if you don't care about any of
them.
"""
return Response(('HTTP', 1, 1), 200, 'OK', _boringHeaders, transport)
class MakeStatefulDispatcherTests(TestCase):
"""
Tests for L{makeStatefulDispatcher}.
"""
def test_functionCalledByState(self):
"""
A method defined with L{makeStatefulDispatcher} invokes a second
method based on the current state of the object.
"""
class Foo:
_state = 'A'
def bar(self):
pass
bar = makeStatefulDispatcher('quux', bar)
def _quux_A(self):
return 'a'
def _quux_B(self):
return 'b'
stateful = Foo()
self.assertEqual(stateful.bar(), 'a')
stateful._state = 'B'
self.assertEqual(stateful.bar(), 'b')
stateful._state = 'C'
self.assertRaises(RuntimeError, stateful.bar)
class HTTPParserTests(TestCase):
"""
Tests for L{HTTPParser} which is responsible for the bulk of the task of
parsing HTTP bytes.
"""
def test_statusCallback(self):
"""
L{HTTPParser} calls its C{statusReceived} method when it receives a
status line.
"""
status = []
protocol = HTTPParser()
protocol.statusReceived = status.append
protocol.makeConnection(StringTransport())
self.assertEqual(protocol.state, STATUS)
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
self.assertEqual(status, ['HTTP/1.1 200 OK'])
self.assertEqual(protocol.state, HEADER)
def _headerTestSetup(self):
header = {}
protocol = HTTPParser()
protocol.headerReceived = header.__setitem__
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
return header, protocol
def test_headerCallback(self):
"""
L{HTTPParser} calls its C{headerReceived} method when it receives a
header.
"""
header, protocol = self._headerTestSetup()
protocol.dataReceived('X-Foo:bar\r\n')
        # Cannot tell that this is not a continued header until the next
        # line arrives and proves not to be a continuation.
protocol.dataReceived('\r\n')
self.assertEqual(header, {'X-Foo': 'bar'})
self.assertEqual(protocol.state, BODY)
def test_continuedHeaderCallback(self):
"""
If a header is split over multiple lines, L{HTTPParser} calls
C{headerReceived} with the entire value once it is received.
"""
header, protocol = self._headerTestSetup()
protocol.dataReceived('X-Foo: bar\r\n')
protocol.dataReceived(' baz\r\n')
protocol.dataReceived('\tquux\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(header, {'X-Foo': 'bar baz\tquux'})
self.assertEqual(protocol.state, BODY)
def test_fieldContentWhitespace(self):
"""
Leading and trailing linear whitespace is stripped from the header
value passed to the C{headerReceived} callback.
"""
header, protocol = self._headerTestSetup()
value = ' \t \r\n bar \t\r\n \t\r\n'
protocol.dataReceived('X-Bar:' + value)
protocol.dataReceived('X-Foo:' + value)
protocol.dataReceived('\r\n')
self.assertEqual(header, {'X-Foo': 'bar',
'X-Bar': 'bar'})
def test_allHeadersCallback(self):
"""
After the last header is received, L{HTTPParser} calls
C{allHeadersReceived}.
"""
called = []
header, protocol = self._headerTestSetup()
def allHeadersReceived():
called.append(protocol.state)
protocol.state = STATUS
protocol.allHeadersReceived = allHeadersReceived
protocol.dataReceived('\r\n')
self.assertEqual(called, [HEADER])
self.assertEqual(protocol.state, STATUS)
def test_noHeaderCallback(self):
"""
If there are no headers in the message, L{HTTPParser} does not call
C{headerReceived}.
"""
header, protocol = self._headerTestSetup()
protocol.dataReceived('\r\n')
self.assertEqual(header, {})
self.assertEqual(protocol.state, BODY)
def test_headersSavedOnResponse(self):
"""
All headers received by L{HTTPParser} are added to
L{HTTPParser.headers}.
"""
protocol = HTTPParser()
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('X-Foo: bar\r\n')
protocol.dataReceived('X-Foo: baz\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(
list(protocol.headers.getAllRawHeaders()),
[('X-Foo', ['bar', 'baz'])])
def test_connectionControlHeaders(self):
"""
L{HTTPParser.isConnectionControlHeader} returns C{True} for headers
which are always connection control headers (similar to "hop-by-hop"
headers from RFC 2616 section 13.5.1) and C{False} for other headers.
"""
protocol = HTTPParser()
connHeaderNames = [
'content-length', 'connection', 'keep-alive', 'te', 'trailers',
'transfer-encoding', 'upgrade', 'proxy-connection']
for header in connHeaderNames:
self.assertTrue(
protocol.isConnectionControlHeader(header),
"Expecting %r to be a connection control header, but "
"wasn't" % (header,))
self.assertFalse(
protocol.isConnectionControlHeader("date"),
"Expecting the arbitrarily selected 'date' header to not be "
"a connection control header, but was.")
def test_switchToBodyMode(self):
"""
L{HTTPParser.switchToBodyMode} raises L{RuntimeError} if called more
than once.
"""
protocol = HTTPParser()
protocol.makeConnection(StringTransport())
protocol.switchToBodyMode(object())
self.assertRaises(RuntimeError, protocol.switchToBodyMode, object())
class HTTPClientParserTests(TestCase):
"""
Tests for L{HTTPClientParser} which is responsible for parsing HTTP
response messages.
"""
def test_parseVersion(self):
"""
L{HTTPClientParser.parseVersion} parses a status line into its three
components.
"""
protocol = HTTPClientParser(None, None)
self.assertEqual(
protocol.parseVersion('CANDY/7.2'),
('CANDY', 7, 2))
def test_parseBadVersion(self):
"""
L{HTTPClientParser.parseVersion} raises L{ValueError} when passed an
unparsable version.
"""
protocol = HTTPClientParser(None, None)
e = BadResponseVersion
f = protocol.parseVersion
def checkParsing(s):
exc = self.assertRaises(e, f, s)
self.assertEqual(exc.data, s)
checkParsing('foo')
checkParsing('foo/bar/baz')
checkParsing('foo/')
checkParsing('foo/..')
checkParsing('foo/a.b')
checkParsing('foo/-1.-1')
def test_responseStatusParsing(self):
"""
L{HTTPClientParser.statusReceived} parses the version, code, and phrase
from the status line and stores them on the response object.
"""
request = Request('GET', '/', _boringHeaders, None)
protocol = HTTPClientParser(request, None)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
self.assertEqual(protocol.response.version, ('HTTP', 1, 1))
self.assertEqual(protocol.response.code, 200)
self.assertEqual(protocol.response.phrase, 'OK')
def test_badResponseStatus(self):
"""
L{HTTPClientParser.statusReceived} raises L{ParseError} if it is called
with a status line which cannot be parsed.
"""
protocol = HTTPClientParser(None, None)
def checkParsing(s):
exc = self.assertRaises(ParseError, protocol.statusReceived, s)
self.assertEqual(exc.data, s)
# If there are fewer than three whitespace-delimited parts to the
# status line, it is not valid and cannot be parsed.
checkParsing('foo')
checkParsing('HTTP/1.1 200')
# If the response code is not an integer, the status line is not valid
# and cannot be parsed.
checkParsing('HTTP/1.1 bar OK')
def _noBodyTest(self, request, response):
"""
Assert that L{HTTPClientParser} parses the given C{response} to
C{request}, resulting in a response with no body and no extra bytes and
leaving the transport in the producing state.
@param request: A L{Request} instance which might have caused a server
to return the given response.
@param response: A string giving the response to be parsed.
@return: A C{dict} of headers from the response.
"""
header = {}
finished = []
protocol = HTTPClientParser(request, finished.append)
protocol.headerReceived = header.__setitem__
body = []
protocol._bodyDataReceived = body.append
transport = StringTransport()
protocol.makeConnection(transport)
protocol.dataReceived(response)
self.assertEqual(transport.producerState, 'producing')
self.assertEqual(protocol.state, DONE)
self.assertEqual(body, [])
self.assertEqual(finished, [''])
self.assertEqual(protocol.response.length, 0)
return header
def test_headResponse(self):
"""
If the response is to a HEAD request, no body is expected, the body
callback is not invoked, and the I{Content-Length} header is passed to
the header callback.
"""
request = Request('HEAD', '/', _boringHeaders, None)
status = (
'HTTP/1.1 200 OK\r\n'
'Content-Length: 10\r\n'
'\r\n')
header = self._noBodyTest(request, status)
self.assertEqual(header, {'Content-Length': '10'})
def test_noContentResponse(self):
"""
If the response code is I{NO CONTENT} (204), no body is expected and
the body callback is not invoked.
"""
request = Request('GET', '/', _boringHeaders, None)
status = (
'HTTP/1.1 204 NO CONTENT\r\n'
'\r\n')
self._noBodyTest(request, status)
def test_notModifiedResponse(self):
"""
If the response code is I{NOT MODIFIED} (304), no body is expected and
the body callback is not invoked.
"""
request = Request('GET', '/', _boringHeaders, None)
status = (
'HTTP/1.1 304 NOT MODIFIED\r\n'
'\r\n')
self._noBodyTest(request, status)
def test_responseHeaders(self):
"""
The response headers are added to the response object's C{headers}
L{Headers} instance.
"""
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
lambda rest: None)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('X-Foo: bar\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(
protocol.connHeaders,
Headers({}))
self.assertEqual(
protocol.response.headers,
Headers({'x-foo': ['bar']}))
self.assertIdentical(protocol.response.length, UNKNOWN_LENGTH)
def test_connectionHeaders(self):
"""
The connection control headers are added to the parser's C{connHeaders}
L{Headers} instance.
"""
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
lambda rest: None)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('Content-Length: 123\r\n')
protocol.dataReceived('Connection: close\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(
protocol.response.headers,
Headers({}))
self.assertEqual(
protocol.connHeaders,
Headers({'content-length': ['123'],
'connection': ['close']}))
self.assertEqual(protocol.response.length, 123)
def test_headResponseContentLengthEntityHeader(self):
"""
If a HEAD request is made, the I{Content-Length} header in the response
is added to the response headers, not the connection control headers.
"""
protocol = HTTPClientParser(
Request('HEAD', '/', _boringHeaders, None),
lambda rest: None)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('Content-Length: 123\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(
protocol.response.headers,
Headers({'content-length': ['123']}))
self.assertEqual(
protocol.connHeaders,
Headers({}))
self.assertEqual(protocol.response.length, 0)
def test_contentLength(self):
"""
If a response includes a body with a length given by the
I{Content-Length} header, the bytes which make up the body are passed
to the C{_bodyDataReceived} callback on the L{HTTPParser}.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
finished.append)
transport = StringTransport()
protocol.makeConnection(transport)
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
body = []
protocol.response._bodyDataReceived = body.append
protocol.dataReceived('Content-Length: 10\r\n')
protocol.dataReceived('\r\n')
# Incidentally, the transport should be paused now. It is the response
# object's responsibility to resume this when it is ready for bytes.
self.assertEqual(transport.producerState, 'paused')
self.assertEqual(protocol.state, BODY)
protocol.dataReceived('x' * 6)
self.assertEqual(body, ['x' * 6])
self.assertEqual(protocol.state, BODY)
protocol.dataReceived('y' * 4)
self.assertEqual(body, ['x' * 6, 'y' * 4])
self.assertEqual(protocol.state, DONE)
        self.assertEqual(finished, [''])
def test_zeroContentLength(self):
"""
If a response includes a I{Content-Length} header indicating zero bytes
in the response, L{Response.length} is set accordingly and no data is
delivered to L{Response._bodyDataReceived}.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
finished.append)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
body = []
protocol.response._bodyDataReceived = body.append
protocol.dataReceived('Content-Length: 0\r\n')
protocol.dataReceived('\r\n')
self.assertEqual(protocol.state, DONE)
self.assertEqual(body, [])
        self.assertEqual(finished, [''])
self.assertEqual(protocol.response.length, 0)
def test_multipleContentLengthHeaders(self):
"""
If a response includes multiple I{Content-Length} headers,
L{HTTPClientParser.dataReceived} raises L{ValueError} to indicate that
the response is invalid and the transport is now unusable.
"""
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
None)
protocol.makeConnection(StringTransport())
self.assertRaises(
ValueError,
protocol.dataReceived,
'HTTP/1.1 200 OK\r\n'
'Content-Length: 1\r\n'
'Content-Length: 2\r\n'
'\r\n')
def test_extraBytesPassedBack(self):
"""
If extra bytes are received past the end of a response, they are passed
to the finish callback.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
finished.append)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('Content-Length: 0\r\n')
protocol.dataReceived('\r\nHere is another thing!')
self.assertEqual(protocol.state, DONE)
self.assertEqual(finished, ['Here is another thing!'])
def test_extraBytesPassedBackHEAD(self):
"""
If extra bytes are received past the end of the headers of a response
to a HEAD request, they are passed to the finish callback.
"""
finished = []
protocol = HTTPClientParser(
Request('HEAD', '/', _boringHeaders, None),
finished.append)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
protocol.dataReceived('Content-Length: 12\r\n')
protocol.dataReceived('\r\nHere is another thing!')
self.assertEqual(protocol.state, DONE)
self.assertEqual(finished, ['Here is another thing!'])
def test_chunkedResponseBody(self):
"""
If the response headers indicate the response body is encoded with the
I{chunked} transfer encoding, the body is decoded according to that
transfer encoding before being passed to L{Response._bodyDataReceived}.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None),
finished.append)
protocol.makeConnection(StringTransport())
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
body = []
protocol.response._bodyDataReceived = body.append
protocol.dataReceived('Transfer-Encoding: chunked\r\n')
protocol.dataReceived('\r\n')
# No data delivered yet
self.assertEqual(body, [])
# Cannot predict the length of a chunked encoded response body.
self.assertIdentical(protocol.response.length, UNKNOWN_LENGTH)
# Deliver some chunks and make sure the data arrives
protocol.dataReceived('3\r\na')
self.assertEqual(body, ['a'])
protocol.dataReceived('bc\r\n')
self.assertEqual(body, ['a', 'bc'])
# The response's _bodyDataFinished method should be called when the last
# chunk is received. Extra data should be passed to the finished
# callback.
protocol.dataReceived('0\r\n\r\nextra')
self.assertEqual(finished, ['extra'])
def test_unknownContentLength(self):
"""
If a response does not include a I{Transfer-Encoding} or a
I{Content-Length}, the end of response body is indicated by the
connection being closed.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None), finished.append)
transport = StringTransport()
protocol.makeConnection(transport)
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
body = []
protocol.response._bodyDataReceived = body.append
protocol.dataReceived('\r\n')
protocol.dataReceived('foo')
protocol.dataReceived('bar')
self.assertEqual(body, ['foo', 'bar'])
protocol.connectionLost(ConnectionDone("simulated end of connection"))
self.assertEqual(finished, [''])
def test_contentLengthAndTransferEncoding(self):
"""
According to RFC 2616, section 4.4, point 3, if I{Content-Length} and
I{Transfer-Encoding: chunked} are present, I{Content-Length} MUST be
        ignored.
"""
finished = []
protocol = HTTPClientParser(
Request('GET', '/', _boringHeaders, None), finished.append)
transport = StringTransport()
protocol.makeConnection(transport)
protocol.dataReceived('HTTP/1.1 200 OK\r\n')
body = []
protocol.response._bodyDataReceived = body.append
protocol.dataReceived(
'Content-Length: 102\r\n'
'Transfer-Encoding: chunked\r\n'
'\r\n'
'3\r\n'
'abc\r\n'
'0\r\n'
'\r\n')
self.assertEqual(body, ['abc'])
self.assertEqual(finished, [''])
def test_connectionLostBeforeBody(self):
"""
If L{HTTPClientParser.connectionLost} is called before the headers are
finished, the C{_responseDeferred} is fired with the L{Failure} passed
to C{connectionLost}.
"""
transport = StringTransport()
protocol = HTTPClientParser(Request('GET', '/', _boringHeaders, None), None)
protocol.makeConnection(transport)
# Grab this here because connectionLost gets rid of the attribute
responseDeferred = protocol._responseDeferred
protocol.connectionLost(Failure(ArbitraryException()))
return assertResponseFailed(
self, responseDeferred, [ArbitraryException])
def test_connectionLostWithError(self):
"""
If one of the L{Response} methods called by
L{HTTPClientParser.connectionLost} raises an exception, the exception
is logged and not re-raised.
"""
transport = StringTransport()
protocol = HTTPClientParser(Request('GET', '/', _boringHeaders, None),
None)
protocol.makeConnection(transport)
response = []
protocol._responseDeferred.addCallback(response.append)
protocol.dataReceived(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 1\r\n'
'\r\n')
response = response[0]
# Arrange for an exception
def fakeBodyDataFinished(err=None):
raise ArbitraryException()
response._bodyDataFinished = fakeBodyDataFinished
protocol.connectionLost(None)
self.assertEqual(len(self.flushLoggedErrors(ArbitraryException)), 1)
class SlowRequest:
"""
L{SlowRequest} is a fake implementation of L{Request} which is easily
controlled externally (for example, by code in a test method).
@ivar stopped: A flag indicating whether C{stopWriting} has been called.
@ivar finished: After C{writeTo} is called, a L{Deferred} which was
returned by that method. L{SlowRequest} will never fire this
L{Deferred}.
"""
method = 'GET'
stopped = False
def writeTo(self, transport):
self.finished = Deferred()
return self.finished
def stopWriting(self):
self.stopped = True
class SimpleRequest:
"""
L{SimpleRequest} is a fake implementation of L{Request} which writes a
short, fixed string to the transport passed to its C{writeTo} method and
returns a succeeded L{Deferred}. This vaguely emulates the behavior of a
L{Request} with no body producer.
"""
def writeTo(self, transport):
transport.write('SOME BYTES')
return succeed(None)
class HTTP11ClientProtocolTests(TestCase):
"""
Tests for the HTTP 1.1 client protocol implementation,
L{HTTP11ClientProtocol}.
"""
def setUp(self):
"""
Create an L{HTTP11ClientProtocol} connected to a fake transport.
"""
self.transport = StringTransport()
self.protocol = HTTP11ClientProtocol()
self.protocol.makeConnection(self.transport)
def test_request(self):
"""
L{HTTP11ClientProtocol.request} accepts a L{Request} and calls its
C{writeTo} method with its own transport.
"""
self.protocol.request(SimpleRequest())
self.assertEqual(self.transport.value(), 'SOME BYTES')
def test_secondRequest(self):
"""
The second time L{HTTP11ClientProtocol.request} is called, it returns a
L{Deferred} which immediately fires with a L{Failure} wrapping a
L{RequestNotSent} exception.
"""
self.protocol.request(SlowRequest())
def cbNotSent(ignored):
self.assertEqual(self.transport.value(), '')
d = self.assertFailure(
self.protocol.request(SimpleRequest()), RequestNotSent)
d.addCallback(cbNotSent)
return d
def test_requestAfterConnectionLost(self):
"""
L{HTTP11ClientProtocol.request} returns a L{Deferred} which immediately
fires with a L{Failure} wrapping a L{RequestNotSent} if called after
the protocol has been disconnected.
"""
self.protocol.connectionLost(
Failure(ConnectionDone("sad transport")))
def cbNotSent(ignored):
self.assertEqual(self.transport.value(), '')
d = self.assertFailure(
self.protocol.request(SimpleRequest()), RequestNotSent)
d.addCallback(cbNotSent)
return d
def test_failedWriteTo(self):
"""
If the L{Deferred} returned by L{Request.writeTo} fires with a
L{Failure}, L{HTTP11ClientProtocol.request} disconnects its transport
and returns a L{Deferred} which fires with a L{Failure} of
L{RequestGenerationFailed} wrapping the underlying failure.
"""
class BrokenRequest:
def writeTo(self, transport):
return fail(ArbitraryException())
d = self.protocol.request(BrokenRequest())
def cbFailed(ignored):
self.assertTrue(self.transport.disconnecting)
# Simulate what would happen if the protocol had a real transport
# and make sure no exception is raised.
self.protocol.connectionLost(
Failure(ConnectionDone("you asked for it")))
d = assertRequestGenerationFailed(self, d, [ArbitraryException])
d.addCallback(cbFailed)
return d
def test_synchronousWriteToError(self):
"""
If L{Request.writeTo} raises an exception,
L{HTTP11ClientProtocol.request} returns a L{Deferred} which fires with
a L{Failure} of L{RequestGenerationFailed} wrapping that exception.
"""
class BrokenRequest:
def writeTo(self, transport):
raise ArbitraryException()
d = self.protocol.request(BrokenRequest())
return assertRequestGenerationFailed(self, d, [ArbitraryException])
def test_connectionLostDuringRequestGeneration(self, mode=None):
"""
If L{HTTP11ClientProtocol}'s transport is disconnected before the
L{Deferred} returned by L{Request.writeTo} fires, the L{Deferred}
returned by L{HTTP11ClientProtocol.request} fires with a L{Failure} of
L{RequestTransmissionFailed} wrapping the underlying failure.
"""
request = SlowRequest()
d = self.protocol.request(request)
d = assertRequestTransmissionFailed(self, d, [ArbitraryException])
# The connection hasn't been lost yet. The request should still be
# allowed to do its thing.
self.assertFalse(request.stopped)
self.protocol.connectionLost(Failure(ArbitraryException()))
# Now the connection has been lost. The request should have been told
# to stop writing itself.
self.assertTrue(request.stopped)
if mode == 'callback':
request.finished.callback(None)
elif mode == 'errback':
request.finished.errback(Failure(AnotherArbitraryException()))
errors = self.flushLoggedErrors(AnotherArbitraryException)
self.assertEqual(len(errors), 1)
else:
# Don't fire the writeTo Deferred at all.
pass
return d
def test_connectionLostBeforeGenerationFinished(self):
"""
If the request passed to L{HTTP11ClientProtocol} finishes generation
successfully after the L{HTTP11ClientProtocol}'s connection has been
lost, nothing happens.
"""
return self.test_connectionLostDuringRequestGeneration('callback')
def test_connectionLostBeforeGenerationFailed(self):
"""
If the request passed to L{HTTP11ClientProtocol} finished generation
with an error after the L{HTTP11ClientProtocol}'s connection has been
lost, nothing happens.
"""
return self.test_connectionLostDuringRequestGeneration('errback')
def test_errorMessageOnConnectionLostBeforeGenerationFailedDoesNotConfuse(self):
"""
If the request passed to L{HTTP11ClientProtocol} finished generation
with an error after the L{HTTP11ClientProtocol}'s connection has been
        lost, an error is logged that gives the user a clear hint about
        what went wrong.
"""
errors = []
log.addObserver(errors.append)
self.addCleanup(log.removeObserver, errors.append)
def check(ignore):
error = errors[0]
            self.assertEqual(error['why'],
                             'Error writing request, but not in valid state '
                             'to finalize request: CONNECTION_LOST')
return self.test_connectionLostDuringRequestGeneration(
'errback').addCallback(check)
def test_receiveSimplestResponse(self):
"""
When a response is delivered to L{HTTP11ClientProtocol}, the
L{Deferred} previously returned by the C{request} method is called back
with a L{Response} instance and the connection is closed.
"""
d = self.protocol.request(Request('GET', '/', _boringHeaders, None))
def cbRequest(response):
self.assertEqual(response.code, 200)
self.assertEqual(response.headers, Headers())
self.assertTrue(self.transport.disconnecting)
d.addCallback(cbRequest)
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"Content-Length: 0\r\n"
"\r\n")
return d
def test_receiveResponseHeaders(self):
"""
The headers included in a response delivered to L{HTTP11ClientProtocol}
are included on the L{Response} instance passed to the callback
returned by the C{request} method.
"""
d = self.protocol.request(Request('GET', '/', _boringHeaders, None))
def cbRequest(response):
expected = Headers({'x-foo': ['bar', 'baz']})
self.assertEqual(response.headers, expected)
d.addCallback(cbRequest)
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"X-Foo: bar\r\n"
"X-Foo: baz\r\n"
"\r\n")
return d
def test_receiveResponseBeforeRequestGenerationDone(self):
"""
If response bytes are delivered to L{HTTP11ClientProtocol} before the
L{Deferred} returned by L{Request.writeTo} fires, those response bytes
are parsed as part of the response.
"""
request = SlowRequest()
d = self.protocol.request(request)
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"X-Foo: bar\r\n"
"Content-Length: 6\r\n"
"\r\n"
"foobar")
def cbResponse(response):
p = AccumulatingProtocol()
whenFinished = p.closedDeferred = Deferred()
response.deliverBody(p)
return whenFinished.addCallback(
lambda ign: (response, p.data))
d.addCallback(cbResponse)
def cbAllResponse((response, body)):
self.assertEqual(response.version, ('HTTP', 1, 1))
self.assertEqual(response.code, 200)
self.assertEqual(response.phrase, 'OK')
self.assertEqual(response.headers, Headers({'x-foo': ['bar']}))
self.assertEqual(body, "foobar")
# Also nothing bad should happen if the request does finally
# finish, even though it is completely irrelevant.
request.finished.callback(None)
d.addCallback(cbAllResponse)
return d
def test_receiveResponseBody(self):
"""
The C{deliverBody} method of the response object with which the
L{Deferred} returned by L{HTTP11ClientProtocol.request} fires can be
used to get the body of the response.
"""
protocol = AccumulatingProtocol()
whenFinished = protocol.closedDeferred = Deferred()
requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"Content-Length: 6\r\n"
"\r")
# Here's what's going on: all the response headers have been delivered
# by this point, so the request Deferred can fire with a Response
# object. The body is yet to come, but that's okay, because the
# Response object is how you *get* the body.
result = []
requestDeferred.addCallback(result.append)
self.assertEqual(result, [])
        # Deliver the very last byte of the response. It is at exactly this
        # point that the Deferred returned by request should fire.
self.protocol.dataReceived("\n")
response = result[0]
response.deliverBody(protocol)
self.protocol.dataReceived("foo")
self.protocol.dataReceived("bar")
def cbAllResponse(ignored):
self.assertEqual(protocol.data, "foobar")
protocol.closedReason.trap(ResponseDone)
whenFinished.addCallback(cbAllResponse)
return whenFinished
def test_responseBodyFinishedWhenConnectionLostWhenContentLengthIsUnknown(
self):
"""
If the length of the response body is unknown, the protocol passed to
the response's C{deliverBody} method has its C{connectionLost}
method called with a L{Failure} wrapping a L{PotentialDataLoss}
exception.
"""
requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"\r\n")
result = []
requestDeferred.addCallback(result.append)
response = result[0]
protocol = AccumulatingProtocol()
response.deliverBody(protocol)
self.protocol.dataReceived("foo")
self.protocol.dataReceived("bar")
self.assertEqual(protocol.data, "foobar")
self.protocol.connectionLost(
Failure(ConnectionDone("low-level transport disconnected")))
protocol.closedReason.trap(PotentialDataLoss)
def test_chunkedResponseBodyUnfinishedWhenConnectionLost(self):
"""
If the final chunk has not been received when the connection is lost
(for any reason), the protocol passed to C{deliverBody} has its
C{connectionLost} method called with a L{Failure} wrapping the
exception for that reason.
"""
requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
self.protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"Transfer-Encoding: chunked\r\n"
"\r\n")
result = []
requestDeferred.addCallback(result.append)
response = result[0]
protocol = AccumulatingProtocol()
response.deliverBody(protocol)
self.protocol.dataReceived("3\r\nfoo\r\n")
self.protocol.dataReceived("3\r\nbar\r\n")
self.assertEqual(protocol.data, "foobar")
self.protocol.connectionLost(Failure(ArbitraryException()))
return assertResponseFailed(
self, fail(protocol.closedReason), [ArbitraryException, _DataLoss])
def test_parserDataReceivedException(self):
"""
        If the parser to which L{HTTP11ClientProtocol} delivers bytes raises
        an exception from C{dataReceived}, the exception is wrapped in a
        L{Failure}, passed to the parser's C{connectionLost}, and then the
        L{HTTP11ClientProtocol}'s transport is disconnected.
"""
requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
self.protocol.dataReceived('unparseable garbage goes here\r\n')
d = assertResponseFailed(self, requestDeferred, [ParseError])
def cbFailed(exc):
self.assertTrue(self.transport.disconnecting)
self.assertEqual(
exc.reasons[0].value.data, 'unparseable garbage goes here')
# Now do what StringTransport doesn't do but a real transport would
# have, call connectionLost on the HTTP11ClientProtocol. Nothing
# is asserted about this, but it's important for it to not raise an
# exception.
self.protocol.connectionLost(Failure(ConnectionDone("it is done")))
d.addCallback(cbFailed)
return d
def test_proxyStopped(self):
"""
When the HTTP response parser is disconnected, the
L{TransportProxyProducer} which was connected to it as a transport is
stopped.
"""
requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
transport = self.protocol._parser.transport
self.assertIdentical(transport._producer, self.transport)
self.protocol._disconnectParser(Failure(ConnectionDone("connection done")))
self.assertIdentical(transport._producer, None)
return assertResponseFailed(self, requestDeferred, [ConnectionDone])
def test_abortClosesConnection(self):
"""
The transport will be told to close its connection when
L{HTTP11ClientProtocol.abort} is invoked.
"""
transport = StringTransport()
protocol = HTTP11ClientProtocol()
protocol.makeConnection(transport)
protocol.abort()
self.assertTrue(transport.disconnecting)
def test_abortBeforeResponseBody(self):
"""
The Deferred returned by L{HTTP11ClientProtocol.request} will fire
with a L{ResponseFailed} failure containing a L{ConnectionAborted}
exception, if the connection was aborted before all response headers
have been received.
"""
transport = StringTransport()
protocol = HTTP11ClientProtocol()
protocol.makeConnection(transport)
result = protocol.request(Request('GET', '/', _boringHeaders, None))
protocol.abort()
self.assertTrue(transport.disconnecting)
protocol.connectionLost(Failure(ConnectionDone()))
return assertResponseFailed(self, result, [ConnectionAborted])
def test_abortAfterResponseHeaders(self):
"""
When the connection is aborted after the response headers have
been received and the L{Response} has been made available to
application code, the response body protocol's C{connectionLost}
method will be invoked with a L{ResponseFailed} failure containing a
L{ConnectionAborted} exception.
"""
transport = StringTransport()
protocol = HTTP11ClientProtocol()
protocol.makeConnection(transport)
result = protocol.request(Request('GET', '/', _boringHeaders, None))
protocol.dataReceived(
"HTTP/1.1 200 OK\r\n"
"Content-Length: 1\r\n"
"\r\n"
)
testResult = Deferred()
class BodyDestination(Protocol):
"""
A body response protocol which immediately aborts the HTTP
connection.
"""
def connectionMade(self):
"""
Abort the HTTP connection.
"""
protocol.abort()
def connectionLost(self, reason):
"""
Make the reason for the losing of the connection available to
the unit test via C{testResult}.
"""
testResult.errback(reason)
def deliverBody(response):
"""
Connect the L{BodyDestination} response body protocol to the
response, and then simulate connection loss after ensuring that
the HTTP connection has been aborted.
"""
response.deliverBody(BodyDestination())
self.assertTrue(transport.disconnecting)
protocol.connectionLost(Failure(ConnectionDone()))
result.addCallback(deliverBody)
return assertResponseFailed(self, testResult,
[ConnectionAborted, _DataLoss])
class StringProducer:
"""
L{StringProducer} is a dummy body producer.
@ivar stopped: A flag which indicates whether or not C{stopProducing} has
been called.
@ivar consumer: After C{startProducing} is called, the value of the
C{consumer} argument to that method.
@ivar finished: After C{startProducing} is called, a L{Deferred} which was
returned by that method. L{StringProducer} will never fire this
L{Deferred}.
"""
implements(IBodyProducer)
stopped = False
def __init__(self, length):
self.length = length
def startProducing(self, consumer):
self.consumer = consumer
self.finished = Deferred()
return self.finished
def stopProducing(self):
self.stopped = True
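# A minimal sketch (not part of the test suite) of how Request drives an
# IBodyProducer such as StringProducer: writeTo calls
# startProducing(consumer), the producer writes bytes to that consumer, and
# firing the Deferred returned by startProducing signals completion. The
# names below are illustrative only:
#
#     producer = StringProducer(3)
#     request = Request('POST', '/', _boringHeaders, producer)
#     request.writeTo(transport)        # registers producer, sets consumer
#     producer.consumer.write('abc')    # body bytes
#     producer.finished.callback(None)  # body complete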
class RequestTests(TestCase):
"""
Tests for L{Request}.
"""
def setUp(self):
self.transport = StringTransport()
def test_sendSimplestRequest(self):
"""
L{Request.writeTo} formats the request data and writes it to the given
transport.
"""
Request('GET', '/', _boringHeaders, None).writeTo(self.transport)
self.assertEqual(
self.transport.value(),
"GET / HTTP/1.1\r\n"
"Connection: close\r\n"
"Host: example.com\r\n"
"\r\n")
def test_sendRequestHeaders(self):
"""
L{Request.writeTo} formats header data and writes it to the given
transport.
"""
headers = Headers({'x-foo': ['bar', 'baz'], 'host': ['example.com']})
Request('GET', '/foo', headers, None).writeTo(self.transport)
lines = self.transport.value().split('\r\n')
self.assertEqual(lines[0], "GET /foo HTTP/1.1")
self.assertEqual(lines[-2:], ["", ""])
del lines[0], lines[-2:]
lines.sort()
self.assertEqual(
lines,
["Connection: close",
"Host: example.com",
"X-Foo: bar",
"X-Foo: baz"])
def test_sendChunkedRequestBody(self):
"""
L{Request.writeTo} uses chunked encoding to write data from the request
body producer to the given transport. It registers the request body
producer with the transport.
"""
producer = StringProducer(UNKNOWN_LENGTH)
request = Request('POST', '/bar', _boringHeaders, producer)
request.writeTo(self.transport)
self.assertNotIdentical(producer.consumer, None)
self.assertIdentical(self.transport.producer, producer)
self.assertTrue(self.transport.streaming)
self.assertEqual(
self.transport.value(),
"POST /bar HTTP/1.1\r\n"
"Connection: close\r\n"
"Transfer-Encoding: chunked\r\n"
"Host: example.com\r\n"
"\r\n")
self.transport.clear()
producer.consumer.write('x' * 3)
producer.consumer.write('y' * 15)
producer.finished.callback(None)
self.assertIdentical(self.transport.producer, None)
self.assertEqual(
self.transport.value(),
"3\r\n"
"xxx\r\n"
"f\r\n"
"yyyyyyyyyyyyyyy\r\n"
"0\r\n"
"\r\n")
def test_sendChunkedRequestBodyWithError(self):
"""
If L{Request} is created with a C{bodyProducer} without a known length
and the L{Deferred} returned from its C{startProducing} method fires
with a L{Failure}, the L{Deferred} returned by L{Request.writeTo} fires
with that L{Failure} and the body producer is unregistered from the
transport. The final zero-length chunk is not written to the
transport.
"""
producer = StringProducer(UNKNOWN_LENGTH)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
self.transport.clear()
producer.finished.errback(ArbitraryException())
def cbFailed(ignored):
self.assertEqual(self.transport.value(), "")
self.assertIdentical(self.transport.producer, None)
d = self.assertFailure(writeDeferred, ArbitraryException)
d.addCallback(cbFailed)
return d
def test_sendRequestBodyWithLength(self):
"""
If L{Request} is created with a C{bodyProducer} with a known length,
that length is sent as the value for the I{Content-Length} header and
chunked encoding is not used.
"""
producer = StringProducer(3)
request = Request('POST', '/bar', _boringHeaders, producer)
request.writeTo(self.transport)
self.assertNotIdentical(producer.consumer, None)
self.assertIdentical(self.transport.producer, producer)
self.assertTrue(self.transport.streaming)
self.assertEqual(
self.transport.value(),
"POST /bar HTTP/1.1\r\n"
"Connection: close\r\n"
"Content-Length: 3\r\n"
"Host: example.com\r\n"
"\r\n")
self.transport.clear()
producer.consumer.write('abc')
producer.finished.callback(None)
self.assertIdentical(self.transport.producer, None)
self.assertEqual(self.transport.value(), "abc")
def test_sendRequestBodyWithTooFewBytes(self):
"""
If L{Request} is created with a C{bodyProducer} with a known length and
the producer does not produce that many bytes, the L{Deferred} returned
by L{Request.writeTo} fires with a L{Failure} wrapping a
L{WrongBodyLength} exception.
"""
producer = StringProducer(3)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
producer.consumer.write('ab')
producer.finished.callback(None)
self.assertIdentical(self.transport.producer, None)
return self.assertFailure(writeDeferred, WrongBodyLength)
def _sendRequestBodyWithTooManyBytesTest(self, finisher):
"""
        Verify that when too many bytes have been written by a body producer
        and the body producer's C{startProducing} L{Deferred} then fires, the
        producer is unregistered from the transport and the L{Deferred}
        returned from L{Request.writeTo} is fired with a L{Failure} wrapping a
        L{WrongBodyLength}.
        @param finisher: A callable which will be invoked with the body
            producer after too many bytes have been written to the transport.
            It should fire the C{startProducing} L{Deferred} in some fashion.
"""
producer = StringProducer(3)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
producer.consumer.write('ab')
# The producer hasn't misbehaved yet, so it shouldn't have been
# stopped.
self.assertFalse(producer.stopped)
producer.consumer.write('cd')
# Now the producer *has* misbehaved, so we should have tried to
# make it stop.
self.assertTrue(producer.stopped)
# The transport should have had the producer unregistered from it as
# well.
self.assertIdentical(self.transport.producer, None)
def cbFailed(exc):
# The "cd" should not have been written to the transport because
# the request can now locally be recognized to be invalid. If we
# had written the extra bytes, the server could have decided to
# start processing the request, which would be bad since we're
# going to indicate failure locally.
self.assertEqual(
self.transport.value(),
"POST /bar HTTP/1.1\r\n"
"Connection: close\r\n"
"Content-Length: 3\r\n"
"Host: example.com\r\n"
"\r\n"
"ab")
self.transport.clear()
# Subsequent writes should be ignored, as should firing the
# Deferred returned from startProducing.
self.assertRaises(ExcessWrite, producer.consumer.write, 'ef')
# Likewise, if the Deferred returned from startProducing fires,
# this should more or less be ignored (aside from possibly logging
# an error).
finisher(producer)
# There should have been nothing further written to the transport.
self.assertEqual(self.transport.value(), "")
d = self.assertFailure(writeDeferred, WrongBodyLength)
d.addCallback(cbFailed)
return d
def test_sendRequestBodyWithTooManyBytes(self):
"""
If L{Request} is created with a C{bodyProducer} with a known length and
        the producer tries to produce more than that many bytes, the
L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
wrapping a L{WrongBodyLength} exception.
"""
def finisher(producer):
producer.finished.callback(None)
return self._sendRequestBodyWithTooManyBytesTest(finisher)
def test_sendRequestBodyErrorWithTooManyBytes(self):
"""
If L{Request} is created with a C{bodyProducer} with a known length and
        the producer tries to produce more than that many bytes, the
L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
wrapping a L{WrongBodyLength} exception.
"""
def finisher(producer):
producer.finished.errback(ArbitraryException())
errors = self.flushLoggedErrors(ArbitraryException)
self.assertEqual(len(errors), 1)
return self._sendRequestBodyWithTooManyBytesTest(finisher)
def test_sendRequestBodyErrorWithConsumerError(self):
"""
Though there should be no way for the internal C{finishedConsuming}
L{Deferred} in L{Request._writeToContentLength} to fire a L{Failure}
after the C{finishedProducing} L{Deferred} has fired, in case this does
happen, the error should be logged with a message about how there's
probably a bug in L{Request}.
This is a whitebox test.
"""
producer = StringProducer(3)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
finishedConsuming = producer.consumer._finished
producer.consumer.write('abc')
producer.finished.callback(None)
finishedConsuming.errback(ArbitraryException())
self.assertEqual(len(self.flushLoggedErrors(ArbitraryException)), 1)
def _sendRequestBodyFinishedEarlyThenTooManyBytes(self, finisher):
"""
        Verify that if the body producer fires its L{Deferred} and then keeps
        writing to the consumer, the extra writes are ignored and the
        L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
        wrapping the most appropriate exception type.
"""
producer = StringProducer(3)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
producer.consumer.write('ab')
finisher(producer)
self.assertIdentical(self.transport.producer, None)
self.transport.clear()
self.assertRaises(ExcessWrite, producer.consumer.write, 'cd')
self.assertEqual(self.transport.value(), "")
return writeDeferred
def test_sendRequestBodyFinishedEarlyThenTooManyBytes(self):
"""
If the request body producer indicates it is done by firing the
L{Deferred} returned from its C{startProducing} method but then goes on
        to write too many bytes, the L{Deferred} returned by L{Request.writeTo}
fires with a L{Failure} wrapping L{WrongBodyLength}.
"""
def finisher(producer):
producer.finished.callback(None)
return self.assertFailure(
self._sendRequestBodyFinishedEarlyThenTooManyBytes(finisher),
WrongBodyLength)
def test_sendRequestBodyErroredEarlyThenTooManyBytes(self):
"""
If the request body producer indicates an error by firing the
L{Deferred} returned from its C{startProducing} method but then goes on
        to write too many bytes, the L{Deferred} returned by L{Request.writeTo}
fires with that L{Failure} and L{WrongBodyLength} is logged.
"""
def finisher(producer):
producer.finished.errback(ArbitraryException())
return self.assertFailure(
self._sendRequestBodyFinishedEarlyThenTooManyBytes(finisher),
ArbitraryException)
def test_sendChunkedRequestBodyFinishedThenWriteMore(self, _with=None):
"""
        If a request body producer with an unknown length tries to write
after firing the L{Deferred} returned by its C{startProducing} method,
the C{write} call raises an exception and does not write anything to
the underlying transport.
"""
producer = StringProducer(UNKNOWN_LENGTH)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
producer.finished.callback(_with)
self.transport.clear()
self.assertRaises(ExcessWrite, producer.consumer.write, 'foo')
self.assertEqual(self.transport.value(), "")
return writeDeferred
def test_sendChunkedRequestBodyFinishedWithErrorThenWriteMore(self):
"""
        If a request body producer with an unknown length tries to write
after firing the L{Deferred} returned by its C{startProducing} method
with a L{Failure}, the C{write} call raises an exception and does not
write anything to the underlying transport.
"""
d = self.test_sendChunkedRequestBodyFinishedThenWriteMore(
Failure(ArbitraryException()))
return self.assertFailure(d, ArbitraryException)
def test_sendRequestBodyWithError(self):
"""
If the L{Deferred} returned from the C{startProducing} method of the
L{IBodyProducer} passed to L{Request} fires with a L{Failure}, the
L{Deferred} returned from L{Request.writeTo} fails with that
L{Failure}.
"""
producer = StringProducer(5)
request = Request('POST', '/bar', _boringHeaders, producer)
writeDeferred = request.writeTo(self.transport)
# Sanity check - the producer should be registered with the underlying
# transport.
self.assertIdentical(self.transport.producer, producer)
self.assertTrue(self.transport.streaming)
producer.consumer.write('ab')
self.assertEqual(
self.transport.value(),
"POST /bar HTTP/1.1\r\n"
"Connection: close\r\n"
"Content-Length: 5\r\n"
"Host: example.com\r\n"
"\r\n"
"ab")
self.assertFalse(self.transport.disconnecting)
producer.finished.errback(Failure(ArbitraryException()))
# Disconnection is handled by a higher level. Request should leave the
# transport alone in this case.
self.assertFalse(self.transport.disconnecting)
        # It should, however, unregister the producer that it registered.
self.assertIdentical(self.transport.producer, None)
return self.assertFailure(writeDeferred, ArbitraryException)
def test_hostHeaderRequired(self):
"""
L{Request.writeTo} raises L{BadHeaders} if there is not exactly one
I{Host} header and writes nothing to the given transport.
"""
request = Request('GET', '/', Headers({}), None)
self.assertRaises(BadHeaders, request.writeTo, self.transport)
self.assertEqual(self.transport.value(), '')
request = Request('GET', '/', Headers({'Host': ['example.com', 'example.org']}), None)
self.assertRaises(BadHeaders, request.writeTo, self.transport)
self.assertEqual(self.transport.value(), '')
def test_stopWriting(self):
"""
L{Request.stopWriting} calls its body producer's C{stopProducing}
method.
"""
producer = StringProducer(3)
request = Request('GET', '/', _boringHeaders, producer)
d = request.writeTo(self.transport)
self.assertFalse(producer.stopped)
request.stopWriting()
self.assertTrue(producer.stopped)
def test_brokenStopProducing(self):
"""
If the body producer's C{stopProducing} method raises an exception,
L{Request.stopWriting} logs it and does not re-raise it.
"""
producer = StringProducer(3)
def brokenStopProducing():
raise ArbitraryException("stopProducing is busted")
producer.stopProducing = brokenStopProducing
request = Request('GET', '/', _boringHeaders, producer)
d = request.writeTo(self.transport)
request.stopWriting()
self.assertEqual(
len(self.flushLoggedErrors(ArbitraryException)), 1)
class LengthEnforcingConsumerTests(TestCase):
"""
Tests for L{LengthEnforcingConsumer}.
"""
def setUp(self):
self.result = Deferred()
self.producer = StringProducer(10)
self.transport = StringTransport()
self.enforcer = LengthEnforcingConsumer(
self.producer, self.transport, self.result)
def test_write(self):
"""
        L{LengthEnforcingConsumer.write} calls the wrapped consumer's C{write}
        method with the bytes it is passed, as long as the total number of
        bytes written does not exceed the C{length} attribute.
"""
self.enforcer.write('abc')
self.assertEqual(self.transport.value(), 'abc')
self.transport.clear()
self.enforcer.write('def')
self.assertEqual(self.transport.value(), 'def')
def test_finishedEarly(self):
"""
L{LengthEnforcingConsumer._noMoreWritesExpected} raises
L{WrongBodyLength} if it is called before the indicated number of bytes
have been written.
"""
self.enforcer.write('x' * 9)
self.assertRaises(WrongBodyLength, self.enforcer._noMoreWritesExpected)
def test_writeTooMany(self, _unregisterAfter=False):
"""
        If L{LengthEnforcingConsumer.write} is called with bytes that bring
        the total past the limit passed to
        L{LengthEnforcingConsumer.__init__}, it fires the L{Deferred} with a
        L{Failure} wrapping a L{WrongBodyLength} and also calls the
        C{stopProducing} method of the producer.
"""
self.enforcer.write('x' * 10)
self.assertFalse(self.producer.stopped)
self.enforcer.write('x')
self.assertTrue(self.producer.stopped)
if _unregisterAfter:
self.enforcer._noMoreWritesExpected()
return self.assertFailure(self.result, WrongBodyLength)
def test_writeAfterNoMoreExpected(self):
"""
If L{LengthEnforcingConsumer.write} is called after
L{LengthEnforcingConsumer._noMoreWritesExpected}, it calls the
producer's C{stopProducing} method and raises L{ExcessWrite}.
"""
self.enforcer.write('x' * 10)
self.enforcer._noMoreWritesExpected()
self.assertFalse(self.producer.stopped)
self.assertRaises(ExcessWrite, self.enforcer.write, 'x')
self.assertTrue(self.producer.stopped)
def test_finishedLate(self):
"""
L{LengthEnforcingConsumer._noMoreWritesExpected} does nothing (in
particular, it does not raise any exception) if called after too many
bytes have been passed to C{write}.
"""
return self.test_writeTooMany(True)
def test_finished(self):
"""
If L{LengthEnforcingConsumer._noMoreWritesExpected} is called after
the correct number of bytes have been written it returns C{None}.
"""
self.enforcer.write('x' * 10)
self.assertIdentical(self.enforcer._noMoreWritesExpected(), None)
def test_stopProducingRaises(self):
"""
If L{LengthEnforcingConsumer.write} calls the producer's
C{stopProducing} because too many bytes were written and the
C{stopProducing} method raises an exception, the exception is logged
and the L{LengthEnforcingConsumer} still errbacks the finished
L{Deferred}.
"""
def brokenStopProducing():
StringProducer.stopProducing(self.producer)
raise ArbitraryException("stopProducing is busted")
self.producer.stopProducing = brokenStopProducing
def cbFinished(ignored):
self.assertEqual(
len(self.flushLoggedErrors(ArbitraryException)), 1)
d = self.test_writeTooMany()
d.addCallback(cbFinished)
return d
class RequestBodyConsumerTests(TestCase):
"""
Tests for L{ChunkedEncoder} which sits between an L{ITransport} and a
request/response body producer and chunked encodes everything written to
it.
"""
def test_interface(self):
"""
L{ChunkedEncoder} instances provide L{IConsumer}.
"""
self.assertTrue(
verifyObject(IConsumer, ChunkedEncoder(StringTransport())))
def test_write(self):
"""
L{ChunkedEncoder.write} writes to the transport the chunked encoded
form of the bytes passed to it.
"""
transport = StringTransport()
encoder = ChunkedEncoder(transport)
encoder.write('foo')
self.assertEqual(transport.value(), '3\r\nfoo\r\n')
transport.clear()
encoder.write('x' * 16)
self.assertEqual(transport.value(), '10\r\n' + 'x' * 16 + '\r\n')
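        # Note: chunk length prefixes are hexadecimal, so the 16-byte write
        # is framed with a "10" prefix.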
def test_producerRegistration(self):
"""
L{ChunkedEncoder.registerProducer} registers the given streaming
producer with its transport and L{ChunkedEncoder.unregisterProducer}
writes a zero-length chunk to its transport and unregisters the
transport's producer.
"""
transport = StringTransport()
producer = object()
encoder = ChunkedEncoder(transport)
encoder.registerProducer(producer, True)
self.assertIdentical(transport.producer, producer)
self.assertTrue(transport.streaming)
encoder.unregisterProducer()
self.assertIdentical(transport.producer, None)
self.assertEqual(transport.value(), '0\r\n\r\n')
class TransportProxyProducerTests(TestCase):
"""
Tests for L{TransportProxyProducer} which proxies the L{IPushProducer}
interface of a transport.
"""
def test_interface(self):
"""
L{TransportProxyProducer} instances provide L{IPushProducer}.
"""
self.assertTrue(
verifyObject(IPushProducer, TransportProxyProducer(None)))
def test_stopProxyingUnreferencesProducer(self):
"""
L{TransportProxyProducer._stopProxying} drops the reference to the
wrapped L{IPushProducer} provider.
"""
transport = StringTransport()
proxy = TransportProxyProducer(transport)
self.assertIdentical(proxy._producer, transport)
proxy._stopProxying()
self.assertIdentical(proxy._producer, None)
def test_resumeProducing(self):
"""
L{TransportProxyProducer.resumeProducing} calls the wrapped
transport's C{resumeProducing} method unless told to stop proxying.
"""
transport = StringTransport()
transport.pauseProducing()
proxy = TransportProxyProducer(transport)
# The transport should still be paused.
self.assertEqual(transport.producerState, 'paused')
proxy.resumeProducing()
# The transport should now be resumed.
self.assertEqual(transport.producerState, 'producing')
transport.pauseProducing()
proxy._stopProxying()
# The proxy should no longer do anything to the transport.
proxy.resumeProducing()
self.assertEqual(transport.producerState, 'paused')
def test_pauseProducing(self):
"""
L{TransportProxyProducer.pauseProducing} calls the wrapped transport's
C{pauseProducing} method unless told to stop proxying.
"""
transport = StringTransport()
proxy = TransportProxyProducer(transport)
# The transport should still be producing.
self.assertEqual(transport.producerState, 'producing')
proxy.pauseProducing()
# The transport should now be paused.
self.assertEqual(transport.producerState, 'paused')
transport.resumeProducing()
proxy._stopProxying()
# The proxy should no longer do anything to the transport.
proxy.pauseProducing()
self.assertEqual(transport.producerState, 'producing')
def test_stopProducing(self):
"""
L{TransportProxyProducer.stopProducing} calls the wrapped transport's
C{stopProducing} method unless told to stop proxying.
"""
transport = StringTransport()
proxy = TransportProxyProducer(transport)
# The transport should still be producing.
self.assertEqual(transport.producerState, 'producing')
proxy.stopProducing()
# The transport should now be stopped.
self.assertEqual(transport.producerState, 'stopped')
transport = StringTransport()
proxy = TransportProxyProducer(transport)
proxy._stopProxying()
proxy.stopProducing()
# The transport should not have been stopped.
self.assertEqual(transport.producerState, 'producing')
class ResponseTests(TestCase):
"""
Tests for L{Response}.
"""
def test_makeConnection(self):
"""
        The L{IProtocol} provider passed to L{Response.deliverBody} has its
        C{makeConnection} method called with an L{IPushProducer} provider
        that is hooked up to the response.
"""
producers = []
transport = StringTransport()
class SomeProtocol(Protocol):
def makeConnection(self, producer):
producers.append(producer)
consumer = SomeProtocol()
response = justTransportResponse(transport)
response.deliverBody(consumer)
[theProducer] = producers
theProducer.pauseProducing()
self.assertEqual(transport.producerState, 'paused')
theProducer.resumeProducing()
self.assertEqual(transport.producerState, 'producing')
def test_dataReceived(self):
"""
The L{IProtocol} provider passed to L{Response.deliverBody} has its
C{dataReceived} method called with bytes received as part of the
response body.
"""
bytes = []
class ListConsumer(Protocol):
def dataReceived(self, data):
bytes.append(data)
consumer = ListConsumer()
response = justTransportResponse(StringTransport())
response.deliverBody(consumer)
response._bodyDataReceived('foo')
self.assertEqual(bytes, ['foo'])
def test_connectionLost(self):
"""
The L{IProtocol} provider passed to L{Response.deliverBody} has its
C{connectionLost} method called with a L{Failure} wrapping
L{ResponseDone} when the response's C{_bodyDataFinished} method is
called.
"""
lost = []
class ListConsumer(Protocol):
def connectionLost(self, reason):
lost.append(reason)
consumer = ListConsumer()
response = justTransportResponse(StringTransport())
response.deliverBody(consumer)
response._bodyDataFinished()
lost[0].trap(ResponseDone)
self.assertEqual(len(lost), 1)
        # The protocol reference should be dropped, too, to facilitate
        # garbage collection.
self.assertIdentical(response._bodyProtocol, None)
def test_bufferEarlyData(self):
"""
If data is delivered to the L{Response} before a protocol is registered
with C{deliverBody}, that data is buffered until the protocol is
registered and then is delivered.
"""
bytes = []
class ListConsumer(Protocol):
def dataReceived(self, data):
bytes.append(data)
protocol = ListConsumer()
response = justTransportResponse(StringTransport())
response._bodyDataReceived('foo')
response._bodyDataReceived('bar')
response.deliverBody(protocol)
response._bodyDataReceived('baz')
self.assertEqual(bytes, ['foo', 'bar', 'baz'])
        # Make sure the implementation-detail byte buffer is cleared, since
        # leaving it around wastes memory.
self.assertIdentical(response._bodyBuffer, None)
def test_multipleStartProducingFails(self):
"""
L{Response.deliverBody} raises L{RuntimeError} if called more than
once.
"""
response = justTransportResponse(StringTransport())
response.deliverBody(Protocol())
self.assertRaises(RuntimeError, response.deliverBody, Protocol())
def test_startProducingAfterFinishedFails(self):
"""
L{Response.deliverBody} raises L{RuntimeError} if called after
L{Response._bodyDataFinished}.
"""
response = justTransportResponse(StringTransport())
response.deliverBody(Protocol())
response._bodyDataFinished()
self.assertRaises(RuntimeError, response.deliverBody, Protocol())
def test_bodyDataReceivedAfterFinishedFails(self):
"""
L{Response._bodyDataReceived} raises L{RuntimeError} if called after
L{Response._bodyDataFinished} but before L{Response.deliverBody}.
"""
response = justTransportResponse(StringTransport())
response._bodyDataFinished()
self.assertRaises(RuntimeError, response._bodyDataReceived, 'foo')
def test_bodyDataReceivedAfterDeliveryFails(self):
"""
L{Response._bodyDataReceived} raises L{RuntimeError} if called after
L{Response._bodyDataFinished} and after L{Response.deliverBody}.
"""
response = justTransportResponse(StringTransport())
response._bodyDataFinished()
response.deliverBody(Protocol())
self.assertRaises(RuntimeError, response._bodyDataReceived, 'foo')
def test_bodyDataFinishedAfterFinishedFails(self):
"""
L{Response._bodyDataFinished} raises L{RuntimeError} if called more
than once.
"""
response = justTransportResponse(StringTransport())
response._bodyDataFinished()
self.assertRaises(RuntimeError, response._bodyDataFinished)
def test_bodyDataFinishedAfterDeliveryFails(self):
"""
L{Response._bodyDataFinished} raises L{RuntimeError} if called after
the body has been delivered.
"""
response = justTransportResponse(StringTransport())
response._bodyDataFinished()
response.deliverBody(Protocol())
self.assertRaises(RuntimeError, response._bodyDataFinished)
def test_transportResumed(self):
"""
        L{Response.deliverBody} resumes the HTTP connection's transport
        before passing it to the given protocol's C{makeConnection} method.
"""
transportState = []
class ListConsumer(Protocol):
def makeConnection(self, transport):
transportState.append(transport.producerState)
transport = StringTransport()
transport.pauseProducing()
protocol = ListConsumer()
response = justTransportResponse(transport)
self.assertEqual(transport.producerState, 'paused')
response.deliverBody(protocol)
self.assertEqual(transportState, ['producing'])
def test_bodyDataFinishedBeforeStartProducing(self):
"""
If the entire body is delivered to the L{Response} before the
response's C{deliverBody} method is called, the protocol passed to
C{deliverBody} is immediately given the body data and then
disconnected.
"""
transport = StringTransport()
response = justTransportResponse(transport)
response._bodyDataReceived('foo')
response._bodyDataReceived('bar')
response._bodyDataFinished()
protocol = AccumulatingProtocol()
response.deliverBody(protocol)
self.assertEqual(protocol.data, 'foobar')
protocol.closedReason.trap(ResponseDone)
def test_finishedWithErrorWhenConnected(self):
"""
The L{Failure} passed to L{Response._bodyDataFinished} when the response
is in the I{connected} state is passed to the C{connectionLost} method
of the L{IProtocol} provider passed to the L{Response}'s
C{deliverBody} method.
"""
transport = StringTransport()
response = justTransportResponse(transport)
protocol = AccumulatingProtocol()
response.deliverBody(protocol)
# Sanity check - this test is for the connected state
self.assertEqual(response._state, 'CONNECTED')
response._bodyDataFinished(Failure(ArbitraryException()))
protocol.closedReason.trap(ArbitraryException)
def test_finishedWithErrorWhenInitial(self):
"""
The L{Failure} passed to L{Response._bodyDataFinished} when the response
is in the I{initial} state is passed to the C{connectionLost} method of
the L{IProtocol} provider passed to the L{Response}'s C{deliverBody}
method.
"""
transport = StringTransport()
response = justTransportResponse(transport)
# Sanity check - this test is for the initial state
self.assertEqual(response._state, 'INITIAL')
response._bodyDataFinished(Failure(ArbitraryException()))
protocol = AccumulatingProtocol()
response.deliverBody(protocol)
protocol.closedReason.trap(ArbitraryException)
|
with-git/tensorflow
|
refs/heads/master
|
tensorflow/tools/docs/build_docs_test.py
|
53
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Run the python doc generator and fail if there are any broken links."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import textwrap
import tensorflow as tf
from tensorflow.python import debug as tf_debug
from tensorflow.python.platform import googletest
from tensorflow.python.platform import resource_loader
from tensorflow.tools.docs import generate_lib
class Flags(object):
resource_root = resource_loader.get_root_dir_with_all_resources()
src_dir = os.path.join(resource_root, 'tensorflow/docs_src')
base_dir = os.path.join(resource_root, 'tensorflow/')
output_dir = googletest.GetTempDir()
class BuildDocsTest(googletest.TestCase):
def testBuildDocs(self):
if sys.version_info >= (3, 0):
print('Warning: Doc generation is not supported from python3.')
return
doc_generator = generate_lib.DocGenerator()
doc_generator.set_py_modules([('tf', tf), ('tfdbg', tf_debug)])
try:
status = doc_generator.build(Flags())
except RuntimeError as e:
if not e.args[0].startswith('Modules nested too deep'):
raise
msg = textwrap.dedent("""\
%s
****************************************************************
If this test fails here, you have most likely introduced an
unsealed module. Make sure to use `remove_undocumented` or similar
utilities to avoid leaking symbols. See above for more information
on the exact point of failure.
****************************************************************
""" % e.args[0])
raise RuntimeError(msg)
if status:
self.fail('Found %s Errors!' % status)
if __name__ == '__main__':
googletest.main()
|
ohmsclaw/volhalla
|
refs/heads/master
|
mongoose/bindings/python/example.py
|
11
|
# This is Python example on how to use Mongoose embeddable web server,
# http://code.google.com/p/mongoose
#
# Before using the mongoose module, make sure that Mongoose shared library is
# built and present in the current (or system library) directory
import mongoose
import sys
# Handle /show and /form URIs.
def EventHandler(event, conn, info):
if event == mongoose.HTTP_ERROR:
conn.printf('%s', 'HTTP/1.0 200 OK\r\n')
conn.printf('%s', 'Content-Type: text/plain\r\n\r\n')
conn.printf('HTTP error: %d\n', info.status_code)
return True
elif event == mongoose.NEW_REQUEST and info.uri == '/show':
conn.printf('%s', 'HTTP/1.0 200 OK\r\n')
conn.printf('%s', 'Content-Type: text/plain\r\n\r\n')
conn.printf('%s %s\n', info.request_method, info.uri)
if info.request_method == 'POST':
content_len = conn.get_header('Content-Length')
post_data = conn.read(int(content_len))
my_var = conn.get_var(post_data, 'my_var')
else:
my_var = conn.get_var(info.query_string, 'my_var')
conn.printf('my_var: %s\n', my_var or '<not set>')
conn.printf('HEADERS: \n')
for header in info.http_headers[:info.num_headers]:
conn.printf(' %s: %s\n', header.name, header.value)
return True
elif event == mongoose.NEW_REQUEST and info.uri == '/form':
conn.write('HTTP/1.0 200 OK\r\n'
'Content-Type: text/html\r\n\r\n'
'Use GET: <a href="/show?my_var=hello">link</a>'
'<form action="/show" method="POST">'
'Use POST: type text and submit: '
'<input type="text" name="my_var"/>'
'<input type="submit"/>'
'</form>')
return True
elif event == mongoose.NEW_REQUEST and info.uri == '/secret':
conn.send_file('/etc/passwd')
return True
else:
return False
# Create a Mongoose object and register the event handler above.
# A list of options may be specified in the constructor.
server = mongoose.Mongoose(EventHandler,
document_root='/tmp',
listening_ports='8080')
print ('Mongoose started on port %s, press enter to quit'
% server.get_option('listening_ports'))
sys.stdin.read(1)
# Deleting server object stops all serving threads
print('Stopping server.')
del server
|
ThiefMaster/indico
|
refs/heads/master
|
indico/modules/rb/models/locations_test.py
|
4
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.modules.rb.models.rooms import Room
pytest_plugins = 'indico.modules.rb.testing.fixtures'
def test_room_name_format(create_location, db, dummy_user):
location = create_location('Foo')
location.room_name_format = '{building}|{floor}|{number}'
assert location._room_name_format == '%1$s|%2$s|%3$s'
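    # The Python-style placeholders are stored as positional printf-style
    # specifiers (the form PostgreSQL's format() expects), which is what the
    # SQL expression for Room.full_name appears to use.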
Room(building=1, floor=2, number=3, verbose_name='First amphitheater', location=location, owner=dummy_user)
Room(building=1, floor=3, number=4, verbose_name='Second amphitheater', location=location, owner=dummy_user)
Room(building=1, floor=2, number=4, verbose_name='Room 3', location=location, owner=dummy_user)
db.session.flush()
assert Room.query.filter(Room.full_name.contains('|3')).count() == 2
|
russellmayhew/satchmo
|
refs/heads/master
|
satchmo/apps/satchmo_ext/newsletter/urls.py
|
5
|
"""
URLConf for Satchmo Newsletter app
This is added automatically by satchmo_store, under the URL given by your livesettings value "NEWSLETTER"/"NEWSLETTER_SLUG".
"""
from django.conf.urls import *
from livesettings import config_value
import logging
log = logging.getLogger('newsletter.urls')
urlpatterns = patterns('satchmo_ext.newsletter.views',
(r'^subscribe/$', 'add_subscription', {}, 'newsletter_subscribe'),
(r'^subscribe/ajah/$', 'add_subscription',
{'result_template' : 'newsletter/ajah.html'}, 'newsletter_subscribe_ajah'),
(r'^unsubscribe/$', 'remove_subscription',
{}, 'newsletter_unsubscribe'),
(r'^unsubscribe/ajah/$', 'remove_subscription',
{'result_template' : 'newsletter/ajah.html'}, 'newsletter_unsubscribe_ajah'),
(r'^update/$', 'update_subscription', {}, 'newsletter_update'),
)
patterns_fn = patterns
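# The alias above is needed because the ``patterns`` parameter of
# add_newsletter_urls shadows Django's ``patterns`` function inside the
# function body.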
def add_newsletter_urls(sender, patterns=(), **kwargs):
newsbase = r'^' + config_value('NEWSLETTER','NEWSLETTER_SLUG') + '/'
log.debug("Adding newsletter urls at %s", newsbase)
newspatterns = patterns_fn('',
(newsbase, include('satchmo_ext.newsletter.urls'))
)
patterns += newspatterns
|
kkappel/web2py-community
|
refs/heads/master
|
languages/my.py
|
1
|
# -*- coding: utf-8 -*-
{
'!langcode!': 'my',
'!langname!': 'Malay',
'%d days ago': '%d hari yang lalu',
'%d hours ago': '%d jam yang lalu',
'%d minutes ago': '%d minit yang lalu',
'%d months ago': '%d bulan yang lalu',
'%d seconds ago': '%d saat yang lalu',
'%d seconds from now': '%d saat dari sekarang',
'%d weeks ago': '%d minggu yang lalu',
'%d years ago': '%d tahun yang lalu',
'%s %%{row} deleted': '%s %%{row} dihapuskan',
'%s %%{row} updated': '%s %%{row} dikemas kini',
'%s selected': '%s dipilih',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'(requires internet access, experimental)': '(memerlukan akses internet, percubaan)',
'(something like "it-it")': '(sesuatu seperti "it-it")',
'1 day ago': '1 hari yang lalu',
'1 hour ago': '1 jam yang lalu',
'1 minute ago': '1 minit yang lalu',
'1 month ago': '1 bulan yang lalu',
'1 second ago': '1 saat yang lalu',
'1 week ago': '1 minggu yang lalu',
'1 year ago': '1 tahun yang lalu',
'< Previous': '< Sebelumnya',
'About': 'Mengenai',
'Add': 'Tambah',
'Admin language': 'Bahasa admin',
'Administrator Password:': 'Kata laluan Administrator:',
'Ajax Recipes': 'Resipi Ajax',
'An error occured, please %s the page': 'Kesilapan telah berlaku, sila %s laman',
'And': 'Dan',
'and rename it:': 'dan menamakan itu:',
'are not used yet': 'tidak digunakan lagi',
'Are you sure you want to delete this object?': 'Apakah anda yakin anda mahu memadam ini?',
'Back': 'Kembali',
'Buy this book': 'Beli buku ini',
'cache, errors and sessions cleaned': 'cache, kesilapan dan sesi dibersihkan',
'Cancel': 'Batal',
'Cannot be empty': 'Tidak boleh kosong',
'Change admin password': 'Tukar kata laluan admin',
'Change password': 'Tukar kata laluan',
'Clean': 'Bersihkan',
'Clear': 'Hapus',
'Clear CACHE?': 'Hapus CACHE?',
'Clear DISK': 'Hapus DISK',
'Clear RAM': 'Hapus RAM',
'Click row to expand traceback': 'Klik baris untuk mengembangkan traceback',
'Close': 'Tutup',
'Community': 'Komuniti',
'Components and Plugins': 'Komponen dan Plugin',
'contains': 'mengandung',
'Copyright': 'Hak Cipta',
'Create': 'Buat',
'create file with filename:': 'mencipta fail dengan nama:',
'created by': 'dicipta oleh',
'currently running': 'sedang berjalan',
'data uploaded': 'data diunggah',
'Delete': 'Hapus',
'Delete this file (you will be asked to confirm deletion)': 'Padam fail ini (anda akan diminta untuk mengesahkan pemadaman)',
'Delete:': 'Hapus:',
'design': 'disain',
'direction: ltr': 'arah: ltr',
'Disk Cleared': 'Disk Dihapuskan',
'Documentation': 'Dokumentasi',
"Don't know what to do?": 'Tidak tahu apa yang perlu dilakukan?',
'done!': 'selesai!',
'Download': 'Unduh',
'Duration': 'Tempoh',
'Email : ': 'Emel : ',
'Email sent': 'Emel dihantar',
'enter a valid email address': 'masukkan alamat emel yang benar',
'enter a valid URL': 'masukkan URL yang benar',
'enter a value': 'masukkan data',
'Error': 'Kesalahan',
'Errors': 'Kesalahan',
'export as csv file': 'eksport sebagai file csv',
'Export:': 'Eksport:',
'File': 'Fail',
'filter': 'menapis',
'First Name': 'Nama Depan',
'Forgot username?': 'Lupa nama pengguna?',
'Free Applications': 'Aplikasi Percuma',
'Gender': 'Jenis Kelamin',
'Group %(group_id)s created': 'Kumpulan %(group_id)s dicipta',
'Group uniquely assigned to user %(id)s': 'Kumpulan unik yang diberikan kepada pengguna %(id)s',
'Groups': 'Kumpulan',
'Hello World': 'Halo Dunia',
'Help': 'Bantuan',
'Home': 'Laman Utama',
'How did you get here?': 'Bagaimana kamu boleh di sini?',
'Image': 'Gambar',
'import': 'import',
'Import/Export': 'Import/Eksport',
'includes': 'termasuk',
'Install': 'Pasang',
'Installation': 'Pemasangan',
'Introduction': 'Pengenalan',
'Invalid email': 'Emel tidak benar',
'Language': 'Bahasa',
'languages': 'bahasa',
'Languages': 'Bahasa',
'Last Name': 'Nama Belakang',
'License for': 'lesen untuk',
'loading...': 'sedang memuat...',
'Logged in': 'Masuk',
'Logged out': 'Keluar',
'Login': 'Masuk',
'Logout': 'Keluar',
'Lost Password': 'Lupa Kata Laluan',
'Lost password?': 'Lupa kata laluan?',
'Maintenance': 'Penyelenggaraan',
'Manage': 'Menguruskan',
'Manage Cache': 'Menguruskan Cache',
'models': 'model',
'Models': 'Model',
'Modules': 'Modul',
'modules': 'modul',
'My Sites': 'Laman Saya',
'New': 'Baru',
'New password': 'Kata laluan baru',
'next 100 rows': '100 baris seterusnya',
'Next >': 'Seterusnya >',
'Next Page': 'Laman Seterusnya',
'No ticket_storage.txt found under /private folder': 'Ticket_storage.txt tidak dijumpai di bawah folder /private',
'not a Zip Code': 'bukan Pos',
'Old password': 'Kata laluan lama',
'Online examples': 'Contoh Online',
'Or': 'Atau',
'or alternatively': 'atau sebagai alternatif',
'Or Get from URL:': 'Atau Dapatkan dari URL:',
'or import from csv file': 'atau import dari file csv',
'Other Plugins': 'Plugin Lain',
'Other Recipes': 'Resipi Lain',
'Overview': 'Tinjauan',
'Pack all': 'Mengemaskan semua',
'Password': 'Kata laluan',
'Password changed': 'Kata laluan berubah',
"Password fields don't match": 'Kata laluan tidak sama',
'please input your password again': 'sila masukan kata laluan anda lagi',
'plugins': 'plugin',
'Plugins': 'Plugin',
'Powered by': 'Disokong oleh',
'Preface': 'Pendahuluan',
'previous 100 rows': '100 baris sebelumnya',
'Previous Page': 'Laman Sebelumnya',
'private files': 'fail peribadi',
'Private files': 'Fail peribadi',
'Profile': 'Profil',
'Profile updated': 'Profil dikemaskini',
'Project Progress': 'Kemajuan Projek',
'Quick Examples': 'Contoh Cepat',
'Ram Cleared': 'Ram Dihapuskan',
'Recipes': 'Resipi',
'Register': 'Daftar',
'Registration successful': 'Pendaftaran berjaya',
'reload': 'memuat kembali',
'Reload routes': 'Memuat laluan kembali',
'Remember me (for 30 days)': 'Ingat saya (selama 30 hari)',
'Request reset password': 'Meminta reset kata laluan',
'Rows selected': 'Baris dipilih',
'Running on %s': 'Berjalan pada %s',
'Save model as...': 'Simpan model sebagai ...',
'Save profile': 'Simpan profil',
'Search': 'Cari',
'Select Files to Package': 'Pilih Fail untuk Pakej',
'Send Email': 'Kirim Emel',
'Size of cache:': 'Saiz cache:',
'Solution': 'Penyelesaian',
'starts with': 'bermula dengan',
'static': 'statik',
'Static': 'Statik',
'Statistics': 'Statistik',
'Support': 'Menyokong',
'test': 'ujian',
'There are no plugins': 'Tiada plugin',
'There are no private files': 'Tiada fail peribadi',
'These files are not served, they are only available from within your app': 'Fail-fail ini tidak disampaikan, mereka hanya boleh didapati dari dalam aplikasi anda',
'These files are served without processing, your images go here': 'Ini fail disampaikan tanpa pemprosesan, imej anda di sini',
'This App': 'App Ini',
'Time in Cache (h:m:s)': 'Waktu di Cache (h: m: s)',
'Title': 'Judul',
'To create a plugin, name a file/folder plugin_[name]': 'Untuk mencipta plugin, nama fail/folder plugin_ [nama]',
'too short': 'terlalu pendek',
'Unable to download because:': 'Tidak dapat memuat turun kerana:',
'unable to parse csv file': 'tidak mampu mengurai file csv',
'update all languages': 'mengemaskini semua bahasa',
'Update:': 'Kemas kini:',
'Upgrade': 'Menaik taraf',
'Upload': 'Unggah',
'Upload a package:': 'Unggah pakej:',
'upload file:': 'unggah fail:',
'upload plugin file:': 'unggah fail plugin:',
'User %(id)s Logged-in': 'Pengguna %(id)s Masuk',
'User %(id)s Logged-out': 'Pengguna %(id)s Keluar',
'User %(id)s Password changed': 'Pengguna %(id)s Kata Laluan berubah',
'User %(id)s Password reset': 'Pengguna %(id)s Kata Laluan telah direset',
'User %(id)s Profile updated': 'Pengguna %(id)s Profil dikemaskini',
'User %(id)s Registered': 'Pengguna %(id)s Didaftarkan',
'value not allowed': 'data tidak benar',
'Verify Password': 'Pengesahan Kata Laluan',
'Version': 'Versi',
'Versioning': 'Pembuatan Sejarah',
'View': 'Lihat',
'Views': 'Lihat',
'views': 'Lihat',
'Web Framework': 'Rangka Kerja Web',
'web2py Recent Tweets': 'Tweet terbaru web2py',
'Website': 'Laman Web',
'Welcome': 'Selamat Datang',
'Welcome to web2py!': 'Selamat Datang di web2py!',
'You are successfully running web2py': 'Anda berjaya menjalankan web2py',
'You can modify this application and adapt it to your needs': 'Anda boleh mengubah suai aplikasi ini dan menyesuaikan dengan keperluan anda',
'You visited the url %s': 'Anda melawat url %s',
}
|
adityahase/frappe
|
refs/heads/develop
|
frappe/desk/doctype/system_console/system_console.py
|
2
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import json
import frappe
from frappe.utils.safe_exec import safe_exec
from frappe.model.document import Document
class SystemConsole(Document):
def run(self):
frappe.only_for('System Manager')
try:
frappe.debug_log = []
safe_exec(self.console)
self.output = '\n'.join(frappe.debug_log)
except: # noqa: E722
self.output = frappe.get_traceback()
if self.commit:
frappe.db.commit()
else:
frappe.db.rollback()
frappe.get_doc(dict(
doctype='Console Log',
script=self.console,
output=self.output)).insert()
frappe.db.commit()
@frappe.whitelist()
def execute_code(doc):
console = frappe.get_doc(json.loads(doc))
console.run()
return console.as_dict()
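# Illustrative only (not part of this module): execute_code expects a
# JSON-serialized System Console document, e.g.
#
#     execute_code(json.dumps({
#         'doctype': 'System Console',
#         'console': 'frappe.msgprint("hello")',
#         'commit': 0,
#     }))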
|
mdworks2016/work_development
|
refs/heads/master
|
Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/pkg_resources/_vendor/packaging/__init__.py
|
1178
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
msebire/intellij-community
|
refs/heads/master
|
python/testData/inspections/TransformClassicClass.py
|
83
|
class <warning descr="Old-style class"><caret>A</warning>:
def foo(self):
pass
def bar(self):
pass
|
FlyRabbit/WeRoBot
|
refs/heads/develop
|
werobot/utils.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import hashlib
import re
import random
import json
import six
import time
from hashlib import sha1
string_types = (six.string_types, six.text_type, six.binary_type)
def get_signature(token, timestamp, nonce, *args):
sign = [token, timestamp, nonce] + list(args)
sign.sort()
sign = to_binary(''.join(sign))
return hashlib.sha1(sign).hexdigest()
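# Illustrative only: the values are sorted before hashing, so e.g.
#     get_signature('tok', '1', '2')
# computes sha1(b'12tok').hexdigest().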
def check_signature(token, timestamp, nonce, signature):
sign = get_signature(token, timestamp, nonce)
return sign == signature
def check_token(token):
return re.match('^[A-Za-z0-9]{3,32}$', token)
def to_text(value, encoding="utf-8"):
if isinstance(value, six.text_type):
return value
if isinstance(value, six.binary_type):
return value.decode(encoding)
return six.text_type(value)
def to_binary(value, encoding="utf-8"):
if isinstance(value, six.binary_type):
return value
if isinstance(value, six.text_type):
return value.encode(encoding)
return six.binary_type(value)
def is_string(value):
return isinstance(value, string_types)
def byte2int(s, index=0):
"""Get the ASCII int value of a character in a string.
:param s: a string
:param index: the position of desired character
:return: ASCII int value
"""
if six.PY2:
return ord(s[index])
return s[index]
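# e.g. byte2int('abc', 1) == 98 on Python 2, and byte2int(b'abc', 1) == 98
# on Python 3, where indexing bytes already yields an int.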
def generate_token(length=''):
if not length:
length = random.randint(3, 32)
length = int(length)
assert 3 <= length <= 32
token = []
letters = 'abcdefghijklmnopqrstuvwxyz' \
'ABCDEFGHIJKLMNOPQRSTUVWXYZ' \
'0123456789'
for _ in range(length):
token.append(random.choice(letters))
return ''.join(token)
def json_loads(s):
s = to_text(s)
return json.loads(s)
def json_dumps(d):
return json.dumps(d)
def pay_sign_dict(
appid, pay_sign_key, add_noncestr=True,
add_timestamp=True, add_appid=True, **kwargs
):
"""
    Sign payment parameters.
"""
assert pay_sign_key, "PAY SIGN KEY IS EMPTY"
if add_appid:
kwargs.update({'appid': appid})
if add_noncestr:
kwargs.update({'noncestr': generate_token()})
if add_timestamp:
kwargs.update({'timestamp': int(time.time())})
params = kwargs.items()
_params = [(k.lower(), v)
for k, v in kwargs.items()
if k.lower() != "appid"]
_params += [('appid', appid), ('appkey', pay_sign_key)]
_params.sort()
sign = sha1('&'.join(["%s=%s" % (str(p[0]), str(p[1]))
for p in _params])).hexdigest()
sign_type = 'SHA1'
return dict(params), sign, sign_type
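# Illustrative only: pay_sign_dict('wx123', 'secret', foo='bar') returns
# the parameter dict (appid, noncestr, timestamp, foo), the SHA1 hex
# signature over the sorted "key=value" pairs (with appkey included only
# in the signed string), and the literal sign type 'SHA1'.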
|
analurandis/Tur
|
refs/heads/master
|
backend/venv/Lib/site-packages/tests/test_cpp_domain.py
|
22
|
# -*- coding: utf-8 -*-
"""
test_cpp_domain
~~~~~~~~~~~~~~~
Tests the C++ Domain
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from util import raises
from sphinx.domains.cpp import DefinitionParser, DefinitionError
def parse(name, string):
return getattr(DefinitionParser(string), 'parse_' + name)()
def test_type_definitions():
rv = parse('member_object', ' const std::string & name = 42')
assert unicode(rv) == 'const std::string& name = 42'
rv = parse('member_object', ' const std::string & name leftover')
assert unicode(rv) == 'const std::string& name'
rv = parse('member_object', ' const std::string & name [n] leftover')
assert unicode(rv) == 'const std::string& name[n]'
rv = parse('member_object', 'const std::vector< unsigned int, long> &name')
assert unicode(rv) == 'const std::vector<unsigned int, long>& name'
x = 'std::vector<std::pair<std::string, int>>& module::test(register ' \
'foo, bar, std::string baz="foobar, blah, bleh") const = 0'
assert unicode(parse('function', x)) == x
x = 'module::myclass::operator std::vector<std::string>()'
assert unicode(parse('function', x)) == x
x = 'explicit module::myclass::foo::foo()'
assert unicode(parse('function', x)) == x
x = 'int printf(const char* fmt, ...)'
assert unicode(parse('function', x)) == x
x = 'int foo(const unsigned int j)'
assert unicode(parse('function', x)) == x
x = 'int foo(const unsigned int const j)'
assert unicode(parse('function', x)) == x
x = 'int foo(const int* const ptr)'
assert unicode(parse('function', x)) == x
x = 'std::vector<std::pair<std::string, long long>> module::blah'
assert unicode(parse('type_object', x)) == x
assert unicode(parse('type_object', 'long long int foo')) == 'long long foo'
x = 'void operator()(const boost::array<VertexID, 2>& v) const'
assert unicode(parse('function', x)) == x
x = 'void operator()(const boost::array<VertexID, 2, "foo, bar">& v) const'
assert unicode(parse('function', x)) == x
x = 'MyClass::MyClass(MyClass::MyClass&&)'
assert unicode(parse('function', x)) == x
x = 'constexpr int get_value()'
assert unicode(parse('function', x)) == x
x = 'static constexpr int get_value()'
assert unicode(parse('function', x)) == x
x = 'int get_value() const noexcept'
assert unicode(parse('function', x)) == x
x = 'int get_value() const noexcept = delete'
assert unicode(parse('function', x)) == x
x = 'MyClass::MyClass(MyClass::MyClass&&) = default'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_virtual_function() const override'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_member_function() volatile'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_member_function() const volatile'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_member_function() &&'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_member_function() &'
assert unicode(parse('function', x)) == x
x = 'MyClass::a_member_function() const &'
assert unicode(parse('function', x)) == x
x = 'int main(int argc, char* argv[][])'
assert unicode(parse('function', x)) == x
x = 'std::vector<std::pair<std::string, int>>& module::test(register ' \
'foo, bar[n], std::string baz="foobar, blah, bleh") const = 0'
assert unicode(parse('function', x)) == x
x = 'module::myclass foo[n]'
assert unicode(parse('member_object', x)) == x
x = 'int foo(Foo f=Foo(double(), std::make_pair(int(2), double(3.4))))'
assert unicode(parse('function', x)) == x
x = 'int foo(A a=x(a))'
assert unicode(parse('function', x)) == x
x = 'int foo(B b=x(a)'
raises(DefinitionError, parse, 'function', x)
x = 'int foo)C c=x(a))'
raises(DefinitionError, parse, 'function', x)
x = 'int foo(D d=x(a'
raises(DefinitionError, parse, 'function', x)
def test_bases():
x = 'A'
assert unicode(parse('class', x)) == x
x = 'A : B'
assert unicode(parse('class', x)) == x
x = 'A : private B'
assert unicode(parse('class', x)) == 'A : B'
x = 'A : public B'
assert unicode(parse('class', x)) == x
x = 'A : B, C'
assert unicode(parse('class', x)) == x
x = 'A : B, protected C, D'
assert unicode(parse('class', x)) == x
def test_operators():
x = parse('function', 'void operator new [ ] ()')
assert unicode(x) == 'void operator new[]()'
x = parse('function', 'void operator delete ()')
assert unicode(x) == 'void operator delete()'
for op in '*-+=/%!':
x = parse('function', 'void operator %s ()' % op)
assert unicode(x) == 'void operator%s()' % op
|
aitjcize/micropython
|
refs/heads/master
|
tests/basics/int-divzero.py
|
1
|
try:
1 / 0
except ZeroDivisionError:
print("ZeroDivisionError")
try:
1 // 0
except ZeroDivisionError:
print("ZeroDivisionError")
|
studionow/pybrightcove
|
refs/heads/master
|
pybrightcove/enums.py
|
3
|
# Copyright (c) 2009 StudioNow, Inc <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pylint: disable=C0111,R0903
class SortByType(object):
"""
PUBLISH_DATE:
Date title was published
CREATION_DATE:
Date title was created.
    MODIFIED_DATE:
        Date title was last modified.
    PLAYS_TOTAL:
        Number of times this title has been viewed.
    PLAYS_TRAILING_WEEK:
        Number of times this title has been viewed in the past 7 days
        (excluding today).
"""
PUBLISH_DATE = "PUBLISH_DATE"
CREATION_DATE = "CREATION_DATE"
MODIFIED_DATE = "MODIFIED_DATE"
PLAYS_TOTAL = "PLAYS_TOTAL"
PLAYS_TRAILING_WEEK = "PLAYS_TRAILING_WEEK"
class SortByOrderType(object):
"""
ASC:
Ascending
DESC:
Descending
"""
ASC = "ASC"
DESC = "DESC"
class UploadStatusEnum(object):
"""
UPLOADING:
File is still uploading
PROCESSING:
Upload complete; being processed.
COMPLETE:
Upload and processing complete.
ERROR:
Error in upload or processing.
"""
UPLOADING = "UPLOADING"
PROCESSING = "PROCESSING"
COMPLETE = "COMPLETE"
ERROR = "ERROR"
class EconomicsEnum(object):
FREE = "FREE"
AD_SUPPORTED = "AD_SUPPORTED"
class EncodeToEnum(object):
MP4 = 'MP4'
FLV = 'FLV'
class ItemStateEnum(object):
ACTIVE = "ACTIVE"
INACTIVE = "INACTIVE"
DELETED = "DELETED"
class PlaylistTypeEnum(object):
"""
EXPLICIT:
A manual playlist, the videos of which were added individually.
OLDEST_TO_NEWEST:
A smart playlist, ordered from oldest to newest by last-modified date.
NEWEST_TO_OLDEST:
A smart playlist, ordered from newest to oldest by last-modified date.
ALPHABETICAL:
A smart playlist, ordered alphabetically.
PLAYS_TOTAL:
A smart playlist, ordered by total plays.
PLAYS_TRAILING_WEEK:
A smart playlist, ordered by most plays in the past week.
"""
EXPLICIT = "EXPLICIT"
OLDEST_TO_NEWEST = "OLDEST_TO_NEWEST"
NEWEST_TO_OLDEST = "NEWEST_TO_OLDEST"
ALPHABETICAL = "ALPHABETICAL"
PLAYS_TOTAL = "PLAYS_TOTAL"
PLAYS_TRAILING_WEEK = "PLAYS_TRAILING_WEEK"
class FilterChoicesEnum(object):
PLAYABLE = "PLAYABLE"
UNSCHEDULED = 'UNSCHEDULED'
INACTIVE = 'INACTIVE'
DELETED = 'DELETED'
class VideoCodecEnum(object):
UNDEFINED = "UNDEFINED"
NONE = "NONE"
SORENSON = "SORENSON"
ON2 = "ON2"
H264 = "H264"
class ImageTypeEnum(object):
VIDEO_STILL = "VIDEO_STILL"
SYNDICATION_STILL = "SYNDICATION_STILL"
THUMBNAIL = "THUMBNAIL"
BACKGROUND = "BACKGROUND"
LOGO = "LOGO"
LOGO_OVERLAY = "LOGO_OVERLAY"
class VideoTypeEnum(object):
FLV_PREVIEW = "FLV_PREVIEW"
FLV_FULL = "FLV_FULL"
FLV_BUMPER = "FLV_BUMPER"
DIGITAL_MASTER = "DIGITAL_MASTER"
class AssetTypeEnum(object):
VIDEO_FULL = "VIDEO_FULL"
FLV_BUMPER = "FLV_BUMPER"
THUMBNAIL = "THUMBNAIL"
VIDEO_STILL = "VIDEO_STILL"
BACKGROUND = "BACKGROUND"
LOGO = "LOGO"
LOGO_OVERLAY = "LOGO_OVERLAY"
OTHER_IMAGE = "OTHER_IMAGE"
class CustomMetaType(object):
ENUM = 'enum'
STRING = 'string'
DEFAULT_SORT_BY = SortByType.CREATION_DATE
DEFAULT_SORT_ORDER = SortByOrderType.ASC
|
rohitwaghchaure/frappe
|
refs/heads/develop
|
frappe/core/page/modules_setup/modules_setup.py
|
9
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.desk.doctype.desktop_icon.desktop_icon import set_hidden_list, get_desktop_icons
from frappe.utils.user import UserPermissions
from frappe import _
@frappe.whitelist()
def update(hidden_list, user=None):
"""update modules"""
if not user:
frappe.only_for('System Manager')
set_hidden_list(hidden_list, user)
frappe.msgprint(frappe._('Updated'), indicator='green', title=_('Success'), alert=True)
def get_context(context):
context.icons = get_user_icons(frappe.session.user)
context.user = frappe.session.user
if 'System Manager' in frappe.get_roles():
context.users = frappe.db.get_all('User', filters={'user_type': 'System User', 'enabled': 1},
fields = ['name', 'first_name', 'last_name'])
@frappe.whitelist()
def get_module_icons_html(user=None):
if user != frappe.session.user:
frappe.only_for('System Manager')
if not user:
icons = frappe.db.get_all('Desktop Icon',
fields='*', filters={'standard': 1}, order_by='idx')
else:
frappe.cache().hdel('desktop_icons', user)
icons = get_user_icons(user)
return frappe.render_template('frappe/core/page/modules_setup/includes/module_icons.html',
{'icons': icons, 'user': user})
def get_user_icons(user):
'''Get user icons for module setup page'''
user_perms = UserPermissions(user)
user_perms.build_permissions()
from frappe.boot import get_allowed_pages
allowed_pages = get_allowed_pages()
icons = []
for icon in get_desktop_icons(user):
add = True
if icon.hidden_in_standard:
add = False
if not icon.custom:
if icon.module_name=='Learn':
pass
elif icon.type=="page" and icon.link not in allowed_pages:
add = False
elif icon.type=="module" and icon.module_name not in user_perms.allow_modules:
add = False
if add:
icons.append(icon)
return icons
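# A minimal usage sketch (assumptions: `hidden_list` arrives as a JSON-encoded
# list of module names, and the user email is hypothetical):
#
#     import json
#     update(json.dumps(['Accounts', 'Stock']), user='[email protected]')
#     icons = get_user_icons('[email protected]')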
|
wetneb/django
|
refs/heads/master
|
tests/auth_tests/urls_admin.py
|
47
|
"""
Test URLs for auth admins.
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.contrib.auth.urls import urlpatterns
# Create a silo'd admin site for just the user/group admins.
site = admin.AdminSite(name='auth_test_admin')
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
urlpatterns += [
url(r'^admin/', include(site.urls)),
]
|
pataquets/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/port/mac.py
|
113
|
# Copyright (C) 2011 Google Inc. All rights reserved.
# Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import time
from webkitpy.common.system.crashlogs import CrashLogs
from webkitpy.common.system.executive import ScriptError
from webkitpy.port.apple import ApplePort
from webkitpy.port.leakdetector import LeakDetector
_log = logging.getLogger(__name__)
class MacPort(ApplePort):
port_name = "mac"
VERSION_FALLBACK_ORDER = ['mac-snowleopard', 'mac-lion', 'mac-mountainlion']
ARCHITECTURES = ['x86_64', 'x86']
def __init__(self, host, port_name, **kwargs):
ApplePort.__init__(self, host, port_name, **kwargs)
self._architecture = self.get_option('architecture')
if not self._architecture:
self._architecture = 'x86_64'
self._leak_detector = LeakDetector(self)
if self.get_option("leaks"):
# DumpRenderTree slows down noticeably if we run more than about 1000 tests in a batch
# with MallocStackLogging enabled.
self.set_option_default("batch_size", 1000)
def default_timeout_ms(self):
if self.get_option('guard_malloc'):
return 350 * 1000
return super(MacPort, self).default_timeout_ms()
def supports_per_test_timeout(self):
return True
def _build_driver_flags(self):
return ['ARCHS=i386'] if self.architecture() == 'x86' else []
def should_retry_crashes(self):
# On Apple Mac, we retry crashes due to https://bugs.webkit.org/show_bug.cgi?id=82233
return True
def default_baseline_search_path(self):
name = self._name.replace('-wk2', '')
if name.endswith(self.FUTURE_VERSION):
fallback_names = [self.port_name]
else:
fallback_names = self.VERSION_FALLBACK_ORDER[self.VERSION_FALLBACK_ORDER.index(name):-1] + [self.port_name]
if self.get_option('webkit_test_runner'):
fallback_names = [self._wk2_port_name(), 'wk2'] + fallback_names
return map(self._webkit_baseline_path, fallback_names)
def _port_specific_expectations_files(self):
return list(reversed([self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in self.baseline_search_path()]))
def setup_environ_for_server(self, server_name=None):
env = super(MacPort, self).setup_environ_for_server(server_name)
if server_name == self.driver_name():
if self.get_option('leaks'):
env['MallocStackLogging'] = '1'
if self.get_option('guard_malloc'):
env['DYLD_INSERT_LIBRARIES'] = '/usr/lib/libgmalloc.dylib:' + self._build_path("libWebCoreTestShim.dylib")
else:
env['DYLD_INSERT_LIBRARIES'] = self._build_path("libWebCoreTestShim.dylib")
env['XML_CATALOG_FILES'] = '' # work around missing /etc/catalog <rdar://problem/4292995>
return env
def operating_system(self):
return 'mac'
# Belongs on a Platform object.
def is_snowleopard(self):
return self._version == "snowleopard"
# Belongs on a Platform object.
def is_lion(self):
return self._version == "lion"
def default_child_processes(self):
if self._version == "snowleopard":
_log.warning("Cannot run tests in parallel on Snow Leopard due to rdar://problem/10621525.")
return 1
default_count = super(MacPort, self).default_child_processes()
# FIXME: https://bugs.webkit.org/show_bug.cgi?id=95906 With too many WebProcesses, WK2 tests get stuck in resource contention.
# To alleviate the issue, reduce the number of running processes.
# Anecdotal evidence suggests that a 4 core/8 core logical machine may run into this, but that a 2 core/4 core logical machine does not.
should_throttle_for_wk2 = self.get_option('webkit_test_runner') and default_count > 4
# We also want to throttle for leaks bots.
if should_throttle_for_wk2 or self.get_option('leaks'):
default_count = int(.75 * default_count)
# Make sure we have enough ram to support that many instances:
total_memory = self.host.platform.total_bytes_memory()
if total_memory:
bytes_per_drt = 256 * 1024 * 1024 # Assume each DRT needs 256MB to run.
overhead = 2048 * 1024 * 1024 # Assume we need 2GB free for the O/S
supportable_instances = max((total_memory - overhead) / bytes_per_drt, 1) # Always use one process, even if we don't have space for it.
if supportable_instances < default_count:
_log.warning("This machine could support %s child processes, but only has enough memory for %s." % (default_count, supportable_instances))
else:
_log.warning("Cannot determine available memory for child processes, using default child process count of %s." % default_count)
supportable_instances = default_count
return min(supportable_instances, default_count)
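# Worked example of the memory throttle above (illustrative numbers only):
# on an 8 GB machine, (8192 MB - 2048 MB overhead) / 256 MB per DRT = 24
# supportable instances, so a default of 8 child processes is left unchanged;
# a 4 GB machine yields (4096 - 2048) / 256 = 8, which caps a 16-core default.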
def _build_java_test_support(self):
java_tests_path = self._filesystem.join(self.layout_tests_dir(), "java")
build_java = [self.make_command(), "-C", java_tests_path]
if self._executive.run_command(build_java, return_exit_code=True): # Paths are absolute, so we don't need to set a cwd.
_log.error("Failed to build Java support files: %s" % build_java)
return False
return True
def check_for_leaks(self, process_name, process_pid):
if not self.get_option('leaks'):
return
# We could use http://code.google.com/p/psutil/ to get the process_name from the pid.
self._leak_detector.check_for_leaks(process_name, process_pid)
def print_leaks_summary(self):
if not self.get_option('leaks'):
return
# We're in the manager process, so the leak detector will not have a valid list of leak files.
# FIXME: This is a hack, but we don't have a better way to get this information from the workers yet.
# FIXME: This will include too many leaks in subsequent runs until the results directory is cleared!
leaks_files = self._leak_detector.leaks_files_in_directory(self.results_directory())
if not leaks_files:
return
total_bytes_string, unique_leaks = self._leak_detector.count_total_bytes_and_unique_leaks(leaks_files)
total_leaks = self._leak_detector.count_total_leaks(leaks_files)
_log.info("%s total leaks found for a total of %s!" % (total_leaks, total_bytes_string))
_log.info("%s unique leaks found!" % unique_leaks)
def _check_port_build(self):
return self.get_option('nojava') or self._build_java_test_support()
def _path_to_webcore_library(self):
return self._build_path('WebCore.framework/Versions/A/WebCore')
def show_results_html_file(self, results_filename):
# We don't use self._run_script() because we don't want to wait for the script
# to exit and we want the output to show up on stdout in case there are errors
# launching the browser.
self._executive.popen([self.path_to_script('run-safari')] + self._arguments_for_configuration() + ['--no-saved-state', '-NSOpen', results_filename],
cwd=self.webkit_base(), stdout=file(os.devnull), stderr=file(os.devnull))
# FIXME: The next two routines turn off the http locking in order
# to work around failures on the bots caused when the slave restarts.
# See https://bugs.webkit.org/show_bug.cgi?id=64886 for more info.
# The proper fix is to make sure the slave is actually stopping NRWT
# properly on restart. Note that by removing the lock file and not waiting,
# the result should be that if there is a web server already running,
# it'll be killed and this one will be started in its place; this
# may lead to weird things happening in the other run. However, I don't
# think we're (intentionally) actually running multiple runs concurrently
# on any Mac bots.
def acquire_http_lock(self):
pass
def release_http_lock(self):
pass
def sample_file_path(self, name, pid):
return self._filesystem.join(self.results_directory(), "{0}-{1}-sample.txt".format(name, pid))
def _get_crash_log(self, name, pid, stdout, stderr, newer_than, time_fn=None, sleep_fn=None, wait_for_log=True):
# Note that we do slow-spin here and wait, since it appears the time
# ReportCrash takes to actually write and flush the file varies when there are
# lots of simultaneous crashes going on.
# FIXME: Should most of this be moved into CrashLogs()?
time_fn = time_fn or time.time
sleep_fn = sleep_fn or time.sleep
crash_log = ''
crash_logs = CrashLogs(self.host)
now = time_fn()
# FIXME: delete this after we're sure this code is working ...
_log.debug('looking for crash log for %s:%s' % (name, str(pid)))
deadline = now + 5 * int(self.get_option('child_processes', 1))
while not crash_log and now <= deadline:
crash_log = crash_logs.find_newest_log(name, pid, include_errors=True, newer_than=newer_than)
if not wait_for_log:
break
if not crash_log or not [line for line in crash_log.splitlines() if not line.startswith('ERROR')]:
sleep_fn(0.1)
now = time_fn()
if not crash_log:
return (stderr, None)
return (stderr, crash_log)
def look_for_new_crash_logs(self, crashed_processes, start_time):
"""Since crash logs can take a long time to be written out if the system is
under stress do a second pass at the end of the test run.
crashes: test_name -> pid, process_name tuple of crashed process
start_time: time the tests started at. We're looking for crash
logs after that time.
"""
crash_logs = {}
for (test_name, process_name, pid) in crashed_processes:
# Passing None for output. This is a second pass after the test finished so
# if the output had any logging we would have already collected it.
crash_log = self._get_crash_log(process_name, pid, None, None, start_time, wait_for_log=False)[1]
if not crash_log:
continue
crash_logs[test_name] = crash_log
return crash_logs
def look_for_new_samples(self, unresponsive_processes, start_time):
sample_files = {}
for (test_name, process_name, pid) in unresponsive_processes:
sample_file = self.sample_file_path(process_name, pid)
if not self._filesystem.isfile(sample_file):
continue
sample_files[test_name] = sample_file
return sample_files
def sample_process(self, name, pid):
try:
hang_report = self.sample_file_path(name, pid)
self._executive.run_command([
"/usr/bin/sample",
pid,
10,
10,
"-file",
hang_report,
])
except ScriptError as e:
_log.warning('Unable to sample process:' + str(e))
def _path_to_helper(self):
binary_name = 'LayoutTestHelper'
return self._build_path(binary_name)
def start_helper(self):
helper_path = self._path_to_helper()
if helper_path:
_log.debug("Starting layout helper %s" % helper_path)
self._helper = self._executive.popen([helper_path],
stdin=self._executive.PIPE, stdout=self._executive.PIPE, stderr=None)
is_ready = self._helper.stdout.readline()
if not is_ready.startswith('ready'):
_log.error("LayoutTestHelper failed to be ready")
def stop_helper(self):
if self._helper:
_log.debug("Stopping LayoutTestHelper")
try:
self._helper.stdin.write("x\n")
self._helper.stdin.close()
self._helper.wait()
except IOError, e:
_log.debug("IOError raised while stopping helper: %s" % str(e))
self._helper = None
def make_command(self):
return self.xcrun_find('make', '/usr/bin/make')
def nm_command(self):
return self.xcrun_find('nm', 'nm')
def xcrun_find(self, command, fallback):
try:
return self._executive.run_command(['xcrun', '-find', command]).rstrip()
except ScriptError:
_log.warn("xcrun failed; falling back to '%s'." % fallback)
return fallback
|
amyshi188/osf.io
|
refs/heads/develop
|
framework/transactions/utils.py
|
61
|
# -*- coding: utf-8 -*-
from flask import make_response
from framework.exceptions import HTTPError
from framework.routing import JSONRenderer, render_mako_string
from website.util import is_json_request
def get_error_message(error):
"""Retrieve error message from error, if available.
"""
try:
return error.args[0]
except IndexError:
return ''
def handle_error(code):
"""Display an error thrown outside a routed view function.
:param int code: Error status code
:return: Flask `Response` object
"""
# TODO: Remove circular import
from website.routes import OsfWebRenderer
json_renderer = JSONRenderer()
web_renderer = OsfWebRenderer('', render_mako_string)
error = HTTPError(code)
renderer = json_renderer if is_json_request() else web_renderer
return make_response(renderer.handle_error(error))
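# A hypothetical wiring sketch (not part of this module): a Flask app would
# typically register handle_error per status code, e.g.
#
#     for code in (404, 500):
#         app.errorhandler(code)(lambda exc, code=code: handle_error(code))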
|
philsheard/pymarketing
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup
setup(name='pymarketing',
version='0.1',
description='Python for Marketing',
url='https://github.com/philsheard/pymarketing',
author='Phil Sheard',
license='GNU GPL v2.0',
packages=['pymarketing'],
zip_safe=False)
|
justathoughtor2/atomicApe
|
refs/heads/encaged
|
cygwin/lib/python2.7/site-packages/pylint/test/functional/too_many_boolean_expressions.py
|
12
|
"""Checks for if statements containing too many boolean expressions"""
# pylint: disable=invalid-name
x = y = z = 5
if x > -5 and x < 5 and y > -5 and y < 5 and z > -5 and z < 5: # [too-many-boolean-expressions]
pass
elif True and False and 1 and 2 and 3:
pass
elif True and False and 1 and 2 and 3 and 4 and 5: # [too-many-boolean-expressions]
pass
elif True and (True and True) and (x == 5 or True or True): # [too-many-boolean-expressions]
pass
elif True and (True or (x > -5 and x < 5 and (z > -5 or z < 5))): # [too-many-boolean-expressions]
pass
elif True == True == True == True == True == True:
pass
if True and False and 1 and 2 and 3:
pass
|
Tranzystorek/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/service-workers/service-worker/resources/update-nocookie-worker.py
|
158
|
import time
def main(request, response):
# no-cache itself to ensure the user agent finds a new version for each update.
headers = [('Cache-Control', 'no-cache, must-revalidate'),
('Pragma', 'no-cache')]
# Set a normal mimetype.
content_type = 'application/javascript'
headers.append(('Content-Type', content_type))
# Return a different script for each access. Use .time() and .clock() for
# best time resolution across different platforms.
return headers, '// %s %s' % (time.time(), time.clock())
|
pycoin/pycoin
|
refs/heads/master
|
pycoin/encoding.py
|
5
|
# -*- coding: utf-8 -*-
"""
Various utilities useful for converting one Bitcoin format to another, including
the human-transcribable format hashed_base58.
The MIT License (MIT)
Copyright (c) 2013 by Richard Kiss
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import hashlib
from .intbytes import byte_to_int, bytes_from_int
BASE58_ALPHABET = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
BASE58_BASE = len(BASE58_ALPHABET)
BASE58_LOOKUP = dict((c, i) for i, c in enumerate(BASE58_ALPHABET))
class EncodingError(Exception):
pass
def ripemd160(data):
return hashlib.new("ripemd160", data)
try:
ripemd160(b'').digest()
except Exception:
# stupid Google App Engine hashlib doesn't support ripemd160 for some stupid reason
# import it from pycrypto. You need to add
# - name: pycrypto
# version: "latest"
# to the "libraries" section of your app.yaml
from Crypto.Hash.RIPEMD import RIPEMD160Hash as ripemd160
def to_long(base, lookup_f, s):
"""
Convert an array to a (possibly bignum) integer, along with a prefix count
of how many leading zeros there are.
base:
the source base
lookup_f:
a function to convert an element of s to a value between 0 and base-1.
s:
the value to convert
"""
prefix = 0
v = 0
for c in s:
v *= base
try:
v += lookup_f(c)
except Exception:
raise EncodingError("bad character %s in string %s" % (c, s))
if v == 0:
prefix += 1
return v, prefix
def from_long(v, prefix, base, charset):
"""The inverse of to_long. Convert an integer to an arbitrary base.
v: the integer value to convert
prefix: the number of prefixed 0s to include
base: the new base
charset: an array indicating what printable character to use for each value.
"""
l = bytearray()
while v > 0:
try:
v, mod = divmod(v, base)
l.append(charset(mod))
except Exception:
raise EncodingError("can't convert to character corresponding to %d" % mod)
l.extend([charset(0)] * prefix)
l.reverse()
return bytes(l)
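# A quick round-trip sketch of to_long/from_long (Python 2 byte strings;
# values chosen for illustration):
#
#     >>> to_long(256, byte_to_int, b'\0\0ab')
#     (24930, 2)
#     >>> from_long(24930, 2, 256, lambda x: x)
#     '\x00\x00ab'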
def to_bytes_32(v):
v = from_long(v, 0, 256, lambda x: x)
if len(v) > 32:
raise ValueError("input to to_bytes_32 is too large")
return ((b'\0' * 32) + v)[-32:]
if hasattr(int, "to_bytes"):
to_bytes_32 = lambda v: v.to_bytes(32, byteorder="big")
def from_bytes_32(v):
if len(v) > 32:
raise OverflowError("int too big to convert")
return to_long(256, byte_to_int, v)[0]
if hasattr(int, "from_bytes"):
from_bytes_32 = lambda v: int.from_bytes(v, byteorder="big")
def double_sha256(data):
"""A standard compound hash."""
return hashlib.sha256(hashlib.sha256(data).digest()).digest()
def hash160(data):
"""A standard compound hash."""
return ripemd160(hashlib.sha256(data).digest()).digest()
def b2a_base58(s):
"""Convert binary to base58 using BASE58_ALPHABET. Like Bitcoin addresses."""
v, prefix = to_long(256, byte_to_int, s)
s = from_long(v, prefix, BASE58_BASE, lambda v: BASE58_ALPHABET[v])
return s.decode("utf8")
def a2b_base58(s):
"""Convert base58 to binary using BASE58_ALPHABET."""
v, prefix = to_long(BASE58_BASE, lambda c: BASE58_LOOKUP[c], s.encode("utf8"))
return from_long(v, prefix, 256, lambda x: x)
def b2a_hashed_base58(data):
"""
A "hashed_base58" structure is a base58 integer (which looks like a string)
with four bytes of hash data at the end. Bitcoin does this in several places,
including Bitcoin addresses.
This function turns data (of type "bytes") into its hashed_base58 equivalent.
"""
return b2a_base58(data + double_sha256(data)[:4])
def a2b_hashed_base58(s):
"""
If the passed string is hashed_base58, return the binary data.
Otherwise raises an EncodingError.
"""
data = a2b_base58(s)
data, the_hash = data[:-4], data[-4:]
if double_sha256(data)[:4] == the_hash:
return data
raise EncodingError("hashed base58 has bad checksum %s" % s)
def is_hashed_base58_valid(base58):
"""Return True if and only if base58 is valid hashed_base58."""
try:
a2b_hashed_base58(base58)
except EncodingError:
return False
return True
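# A checksum sketch (illustrative 21-byte payload: version byte + hash160):
#
#     payload = b'\0' * 21
#     addr = b2a_hashed_base58(payload)
#     assert is_hashed_base58_valid(addr)
#     assert a2b_hashed_base58(addr) == payload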
def wif_to_tuple_of_prefix_secret_exponent_compressed(wif):
"""
Return a tuple of (prefix, secret_exponent, is_compressed).
"""
decoded = a2b_hashed_base58(wif)
actual_prefix, private_key = decoded[:1], decoded[1:]
compressed = len(private_key) > 32
return actual_prefix, from_bytes_32(private_key[:32]), compressed
def wif_to_tuple_of_secret_exponent_compressed(wif, allowable_wif_prefixes=[b'\x80']):
"""Convert a WIF string to the corresponding secret exponent. Private key manipulation.
Returns a tuple: the secret exponent, as a bignum integer, and a boolean indicating if the
WIF corresponded to a compressed key or not.
Not that it matters, since we can use the secret exponent to generate both the compressed
and uncompressed Bitcoin address."""
actual_prefix, secret_exponent, is_compressed = wif_to_tuple_of_prefix_secret_exponent_compressed(wif)
if actual_prefix not in allowable_wif_prefixes:
raise EncodingError("unexpected first byte of WIF %s" % wif)
return secret_exponent, is_compressed
def wif_to_secret_exponent(wif, allowable_wif_prefixes=[b'\x80']):
"""Convert a WIF string to the corresponding secret exponent."""
return wif_to_tuple_of_secret_exponent_compressed(wif, allowable_wif_prefixes=allowable_wif_prefixes)[0]
def is_valid_wif(wif, allowable_wif_prefixes=[b'\x80']):
"""Return a boolean indicating if the WIF is valid."""
try:
wif_to_secret_exponent(wif, allowable_wif_prefixes=allowable_wif_prefixes)
except EncodingError:
return False
return True
def secret_exponent_to_wif(secret_exp, compressed=True, wif_prefix=b'\x80'):
"""Convert a secret exponent (correspdong to a private key) to WIF format."""
d = wif_prefix + to_bytes_32(secret_exp)
if compressed:
d += b'\01'
return b2a_hashed_base58(d)
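# Round-trip sketch (hypothetical secret exponent):
#
#     wif = secret_exponent_to_wif(12345, compressed=True)
#     assert is_valid_wif(wif)
#     assert wif_to_secret_exponent(wif) == 12345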
def public_pair_to_sec(public_pair, compressed=True):
"""Convert a public pair (a pair of bignums corresponding to a public key) to the
gross internal sec binary format used by OpenSSL."""
x_str = to_bytes_32(public_pair[0])
if compressed:
return bytes_from_int((2 + (public_pair[1] & 1))) + x_str
y_str = to_bytes_32(public_pair[1])
return b'\4' + x_str + y_str
def sec_to_public_pair(sec, strict=True):
"""Convert a public key in sec binary format to a public pair."""
x = from_bytes_32(sec[1:33])
sec0 = sec[:1]
if len(sec) == 65:
isok = sec0 == b'\4'
if not strict:
isok = isok or (sec0 in [b'\6', b'\7'])
if isok:
y = from_bytes_32(sec[33:65])
return (x, y)
elif len(sec) == 33:
if not strict or (sec0 in (b'\2', b'\3')):
from .ecdsa import public_pair_for_x, generator_secp256k1
return public_pair_for_x(generator_secp256k1, x, is_even=(sec0 == b'\2'))
raise EncodingError("bad sec encoding for public key")
def is_sec_compressed(sec):
"""Return a boolean indicating if the sec represents a compressed public key."""
return sec[:1] in (b'\2', b'\3')
def public_pair_to_hash160_sec(public_pair, compressed=True):
"""Convert a public_pair (corresponding to a public key) to hash160_sec format.
This is a hash of the sec representation of a public key, and is used to generate
the corresponding Bitcoin address."""
return hash160(public_pair_to_sec(public_pair, compressed=compressed))
def hash160_sec_to_bitcoin_address(hash160_sec, address_prefix=b'\0'):
"""Convert the hash160 of a sec version of a public_pair to a Bitcoin address."""
return b2a_hashed_base58(address_prefix + hash160_sec)
def bitcoin_address_to_hash160_sec_with_prefix(bitcoin_address):
"""
Convert a Bitcoin address back to the hash160_sec format and
also return the prefix.
"""
blob = a2b_hashed_base58(bitcoin_address)
if len(blob) != 21:
raise EncodingError("incorrect binary length (%d) for Bitcoin address %s" %
(len(blob), bitcoin_address))
if blob[:1] not in [b'\x6f', b'\0']:
raise EncodingError("incorrect first byte (%s) for Bitcoin address %s" % (blob[0], bitcoin_address))
return blob[1:], blob[:1]
def bitcoin_address_to_hash160_sec(bitcoin_address, address_prefix=b'\0'):
"""Convert a Bitcoin address back to the hash160_sec format of the public key.
Since we only know the hash of the public key, we can't get the full public key back."""
hash160, actual_prefix = bitcoin_address_to_hash160_sec_with_prefix(bitcoin_address)
if (address_prefix == actual_prefix):
return hash160
raise EncodingError("Bitcoin address %s for wrong network" % bitcoin_address)
def public_pair_to_bitcoin_address(public_pair, compressed=True, address_prefix=b'\0'):
"""Convert a public_pair (corresponding to a public key) to a Bitcoin address."""
return hash160_sec_to_bitcoin_address(public_pair_to_hash160_sec(
public_pair, compressed=compressed), address_prefix=address_prefix)
def is_valid_bitcoin_address(bitcoin_address, allowable_prefixes=b'\0'):
"""Return True if and only if bitcoin_address is valid."""
try:
hash160, prefix = bitcoin_address_to_hash160_sec_with_prefix(bitcoin_address)
except EncodingError:
return False
return prefix in allowable_prefixes
|
aborilov/txjsonrpc
|
refs/heads/master
|
txjsonrpc/scripts/getVersion.py
|
4
|
from txjsonrpc import meta
print meta.version
|
gaboflowers/mallador_v3
|
refs/heads/master
|
unidecode/x021.py
|
62
|
data = (
'', # 0x00
'', # 0x01
'C', # 0x02
'', # 0x03
'', # 0x04
'', # 0x05
'', # 0x06
'', # 0x07
'', # 0x08
'', # 0x09
'', # 0x0a
'', # 0x0b
'', # 0x0c
'H', # 0x0d
'', # 0x0e
'', # 0x0f
'', # 0x10
'', # 0x11
'', # 0x12
'', # 0x13
'', # 0x14
'N', # 0x15
'', # 0x16
'', # 0x17
'', # 0x18
'P', # 0x19
'Q', # 0x1a
'', # 0x1b
'', # 0x1c
'R', # 0x1d
'', # 0x1e
'', # 0x1f
'(sm)', # 0x20
'TEL', # 0x21
'(tm)', # 0x22
'', # 0x23
'Z', # 0x24
'', # 0x25
'', # 0x26
'', # 0x27
'', # 0x28
'', # 0x29
'K', # 0x2a
'A', # 0x2b
'', # 0x2c
'', # 0x2d
'e', # 0x2e
'e', # 0x2f
'E', # 0x30
'F', # 0x31
'F', # 0x32
'M', # 0x33
'', # 0x34
'', # 0x35
'', # 0x36
'', # 0x37
'', # 0x38
'', # 0x39
'', # 0x3a
'FAX', # 0x3b
'', # 0x3c
'', # 0x3d
'', # 0x3e
'', # 0x3f
'[?]', # 0x40
'[?]', # 0x41
'[?]', # 0x42
'[?]', # 0x43
'[?]', # 0x44
'D', # 0x45
'd', # 0x46
'e', # 0x47
'i', # 0x48
'j', # 0x49
'[?]', # 0x4a
'[?]', # 0x4b
'[?]', # 0x4c
'[?]', # 0x4d
'F', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
' 1/3 ', # 0x53
' 2/3 ', # 0x54
' 1/5 ', # 0x55
' 2/5 ', # 0x56
' 3/5 ', # 0x57
' 4/5 ', # 0x58
' 1/6 ', # 0x59
' 5/6 ', # 0x5a
' 1/8 ', # 0x5b
' 3/8 ', # 0x5c
' 5/8 ', # 0x5d
' 7/8 ', # 0x5e
' 1/', # 0x5f
'I', # 0x60
'II', # 0x61
'III', # 0x62
'IV', # 0x63
'V', # 0x64
'VI', # 0x65
'VII', # 0x66
'VIII', # 0x67
'IX', # 0x68
'X', # 0x69
'XI', # 0x6a
'XII', # 0x6b
'L', # 0x6c
'C', # 0x6d
'D', # 0x6e
'M', # 0x6f
'i', # 0x70
'ii', # 0x71
'iii', # 0x72
'iv', # 0x73
'v', # 0x74
'vi', # 0x75
'vii', # 0x76
'viii', # 0x77
'ix', # 0x78
'x', # 0x79
'xi', # 0x7a
'xii', # 0x7b
'l', # 0x7c
'c', # 0x7d
'd', # 0x7e
'm', # 0x7f
'(D', # 0x80
'D)', # 0x81
'((|))', # 0x82
')', # 0x83
'[?]', # 0x84
'[?]', # 0x85
'[?]', # 0x86
'[?]', # 0x87
'[?]', # 0x88
'[?]', # 0x89
'[?]', # 0x8a
'[?]', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'-', # 0x90
'|', # 0x91
'-', # 0x92
'|', # 0x93
'-', # 0x94
'|', # 0x95
'\\', # 0x96
'/', # 0x97
'\\', # 0x98
'/', # 0x99
'-', # 0x9a
'-', # 0x9b
'~', # 0x9c
'~', # 0x9d
'-', # 0x9e
'|', # 0x9f
'-', # 0xa0
'|', # 0xa1
'-', # 0xa2
'-', # 0xa3
'-', # 0xa4
'|', # 0xa5
'-', # 0xa6
'|', # 0xa7
'|', # 0xa8
'-', # 0xa9
'-', # 0xaa
'-', # 0xab
'-', # 0xac
'-', # 0xad
'-', # 0xae
'|', # 0xaf
'|', # 0xb0
'|', # 0xb1
'|', # 0xb2
'|', # 0xb3
'|', # 0xb4
'|', # 0xb5
'^', # 0xb6
'V', # 0xb7
'\\', # 0xb8
'=', # 0xb9
'V', # 0xba
'^', # 0xbb
'-', # 0xbc
'-', # 0xbd
'|', # 0xbe
'|', # 0xbf
'-', # 0xc0
'-', # 0xc1
'|', # 0xc2
'|', # 0xc3
'=', # 0xc4
'|', # 0xc5
'=', # 0xc6
'=', # 0xc7
'|', # 0xc8
'=', # 0xc9
'|', # 0xca
'=', # 0xcb
'=', # 0xcc
'=', # 0xcd
'=', # 0xce
'=', # 0xcf
'=', # 0xd0
'|', # 0xd1
'=', # 0xd2
'|', # 0xd3
'=', # 0xd4
'|', # 0xd5
'\\', # 0xd6
'/', # 0xd7
'\\', # 0xd8
'/', # 0xd9
'=', # 0xda
'=', # 0xdb
'~', # 0xdc
'~', # 0xdd
'|', # 0xde
'|', # 0xdf
'-', # 0xe0
'|', # 0xe1
'-', # 0xe2
'|', # 0xe3
'-', # 0xe4
'-', # 0xe5
'-', # 0xe6
'|', # 0xe7
'-', # 0xe8
'|', # 0xe9
'|', # 0xea
'|', # 0xeb
'|', # 0xec
'|', # 0xed
'|', # 0xee
'|', # 0xef
'-', # 0xf0
'\\', # 0xf1
'\\', # 0xf2
'|', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
GunoH/intellij-community
|
refs/heads/master
|
python/testData/intentions/PyAnnotateVariableTypeIntentionTest/typeCommentLocalWithTarget.py
|
38
|
def func():
with open('file.txt') as var:
v<caret>ar
|
ryfeus/lambda-packs
|
refs/heads/master
|
Rasterio_osgeo_shapely_PIL_pyproj_numpy/source/numpy/distutils/command/egg_info.py
|
30
|
from __future__ import division, absolute_import, print_function
import sys
from setuptools.command.egg_info import egg_info as _egg_info
class egg_info(_egg_info):
def run(self):
if 'sdist' in sys.argv:
import warnings
warnings.warn("`build_src` is being run, this may lead to missing "
"files in your sdist! See numpy issue gh-7127 for "
"details", UserWarning)
# We need to ensure that build_src has been executed in order to give
# setuptools' egg_info command real filenames instead of functions which
# generate files.
self.run_command("build_src")
_egg_info.run(self)
|
bholley/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/manifest/item.py
|
148
|
import urlparse
from abc import ABCMeta, abstractmethod, abstractproperty
item_types = ["testharness", "reftest", "manual", "stub", "wdspec"]
def get_source_file(source_files, tests_root, manifest, path):
def make_new():
from sourcefile import SourceFile
return SourceFile(tests_root, path, manifest.url_base)
if source_files is None:
return make_new()
if path not in source_files:
source_files[path] = make_new()
return source_files[path]
class ManifestItem(object):
__metaclass__ = ABCMeta
item_type = None
def __init__(self, source_file, manifest=None):
self.manifest = manifest
self.source_file = source_file
@abstractproperty
def id(self):
"""The test's id (usually its url)"""
pass
@property
def path(self):
"""The test path relative to the test_root"""
return self.source_file.rel_path
@property
def https(self):
return "https" in self.source_file.meta_flags
def key(self):
"""A unique identifier for the test"""
return (self.item_type, self.id)
def meta_key(self):
"""Extra metadata that doesn't form part of the test identity, but for
which changes mean regenerating the manifest (e.g. the test timeout."""
return ()
def __eq__(self, other):
if not hasattr(other, "key"):
return False
return self.key() == other.key()
def __hash__(self):
return hash(self.key() + self.meta_key())
def to_json(self):
return {"path": self.path}
@classmethod
def from_json(self, manifest, tests_root, obj, source_files=None):
raise NotImplementedError
class URLManifestItem(ManifestItem):
def __init__(self, source_file, url, url_base="/", manifest=None):
ManifestItem.__init__(self, source_file, manifest=manifest)
self._url = url
self.url_base = url_base
@property
def id(self):
return self.url
@property
def url(self):
return urlparse.urljoin(self.url_base, self._url)
def to_json(self):
rv = ManifestItem.to_json(self)
rv["url"] = self._url
return rv
@classmethod
def from_json(cls, manifest, tests_root, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, obj["path"])
return cls(source_file,
obj["url"],
url_base=manifest.url_base,
manifest=manifest)
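# A serialization round-trip sketch (the manifest/tests_root objects are
# hypothetical):
#
#     obj = item.to_json()                 # e.g. {"path": ..., "url": ...}
#     clone = URLManifestItem.from_json(manifest, tests_root, obj)
#     assert clone.key() == item.key()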
class TestharnessTest(URLManifestItem):
item_type = "testharness"
def __init__(self, source_file, url, url_base="/", timeout=None, manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
self.timeout = timeout
def meta_key(self):
return (self.timeout,)
def to_json(self):
rv = URLManifestItem.to_json(self)
if self.timeout is not None:
rv["timeout"] = self.timeout
return rv
@classmethod
def from_json(cls, manifest, tests_root, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, obj["path"])
return cls(source_file,
obj["url"],
url_base=manifest.url_base,
timeout=obj.get("timeout"),
manifest=manifest)
class RefTest(URLManifestItem):
item_type = "reftest"
def __init__(self, source_file, url, references, url_base="/", timeout=None,
manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
for _, ref_type in references:
if ref_type not in ["==", "!="]:
raise ValueError, "Unrecognised ref_type %s" % ref_type
self.references = tuple(references)
self.timeout = timeout
@property
def is_reference(self):
return self.source_file.name_is_reference
def meta_key(self):
return (self.timeout,)
def to_json(self):
rv = URLManifestItem.to_json(self)
rv["references"] = self.references
if self.timeout is not None:
rv["timeout"] = self.timeout
return rv
@classmethod
def from_json(cls, manifest, tests_root, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, obj["path"])
return cls(source_file,
obj["url"],
obj["references"],
url_base=manifest.url_base,
timeout=obj.get("timeout"),
manifest=manifest)
class ManualTest(URLManifestItem):
item_type = "manual"
class Stub(URLManifestItem):
item_type = "stub"
class WebdriverSpecTest(ManifestItem):
item_type = "wdspec"
@property
def id(self):
return self.path
@classmethod
def from_json(cls, manifest, tests_root, obj, source_files=None):
source_file = get_source_file(source_files, tests_root, manifest, obj["path"])
return cls(source_file, manifest=manifest)
|
drakeet/shadowsocks
|
refs/heads/master
|
utils/autoban.py
|
1033
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='See README')
parser.add_argument('-c', '--count', default=3, type=int,
help='with how many failure times it should be '
'considered as an attack')
config = parser.parse_args()
ips = {}
banned = set()
for line in sys.stdin:
if 'can not parse header when' in line:
ip = line.split()[-1].split(':')[0]
if ip not in ips:
ips[ip] = 1
print(ip)
sys.stdout.flush()
else:
ips[ip] += 1
if ip not in banned and ips[ip] >= config.count:
banned.add(ip)
cmd = 'iptables -A INPUT -s %s -j DROP' % ip
print(cmd, file=sys.stderr)
sys.stderr.flush()
os.system(cmd)
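# Typical invocation sketch (an assumption; the log path is illustrative and
# the details live in the README referenced above):
#
#     tail -F /var/log/shadowsocks.log | python autoban.py --count 5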
|
arienchen/pytibrv
|
refs/heads/master
|
examples/python/tibrvlisten.py
|
1
|
##
# tibrvlisten.py
# rewrite TIBRV example: tibrvlisten.c
# using Python Object Model
#
# LAST MODIFIED: V1.0 2016-12-22 ARIEN [email protected]
#
import sys
import getopt
from pytibrv.events import *
def usage():
print()
print("tibrvlisten.py [--service service] [--network network]")
print(" [--daemon daemon] <subject> ")
print()
sys.exit(1)
def get_params(argv):
try:
opts, args = getopt.getopt(argv, '', ['service=', 'network=', 'daemon='])
except getopt.GetoptError:
usage()
service = None
network = None
daemon = None
for opt, arg in opts:
if opt == '--service':
service = arg
elif opt == '--network':
network = arg
elif opt == '--daemon':
daemon = arg
else:
usage()
if len(args) != 1:
usage()
return service, network, daemon, args[0]
def my_callback(event, msg, closure):
localTime, gmtTime = TibrvMsg.nowString()
if msg.replySubject is not None:
print("{} ({}): subject={}, reply={}, message={}".format(
localTime, gmtTime, msg.sendSubject, msg.replySubject, str(msg)))
else:
print("{} ({}): subject={}, message={}".format(
localTime, gmtTime, msg.sendSubject, str(msg)))
# MAIN PROGRAM
def main(argv):
progname = argv[0]
service, network, daemon, subj = get_params(argv[1:])
err = Tibrv.open()
if err != TIBRV_OK:
print('{}: Failed to open TIB/RV: {}'.format(progname, TibrvStatus.text(err)))
sys.exit(1)
tx = TibrvTx()
err = tx.create(service, network, daemon)
if err != TIBRV_OK:
print('{}: Failed to initialize transport: {}'.format(progname, TibrvStatus.text(err)))
sys.exit(1)
tx.description = progname
print("tibrvlisten: Listening to subject {}".format(subj))
def_que = TibrvQueue()
listener = TibrvListener()
err = listener.create(def_que, TibrvMsgCallback(my_callback), tx, subj, None)
if err != TIBRV_OK:
print('{}: Error {} listening to {}'.format(progname, TibrvStatus.text(err), subj))
sys.exit(2)
while def_que.dispatch() == TIBRV_OK:
pass
# On Linux/OSX,
# CTRL-C will not interrupt the process;
# CTRL-\ (SIGQUIT) would work.
del listener
del tx
Tibrv.close()
sys.exit(0)
return
if __name__ == "__main__":
main(sys.argv)
|
waltBB/neutron_read
|
refs/heads/master
|
neutron/tests/unit/test_metadata_namespace_proxy.py
|
21
|
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
import webob
from neutron.agent.linux import utils as agent_utils
from neutron.agent.metadata import namespace_proxy as ns_proxy
from neutron.common import exceptions
from neutron.common import utils
from neutron.tests import base
class FakeConf(object):
admin_user = 'neutron'
admin_password = 'password'
admin_tenant_name = 'tenant'
auth_url = 'http://127.0.0.1'
auth_strategy = 'keystone'
auth_region = 'region'
nova_metadata_ip = '9.9.9.9'
nova_metadata_port = 8775
metadata_proxy_shared_secret = 'secret'
class TestNetworkMetadataProxyHandler(base.BaseTestCase):
def setUp(self):
super(TestNetworkMetadataProxyHandler, self).setUp()
self.log_p = mock.patch.object(ns_proxy, 'LOG')
self.log = self.log_p.start()
self.handler = ns_proxy.NetworkMetadataProxyHandler('router_id')
def test_call(self):
req = mock.Mock(headers={})
with mock.patch.object(self.handler, '_proxy_request') as proxy_req:
proxy_req.return_value = 'value'
retval = self.handler(req)
self.assertEqual(retval, 'value')
proxy_req.assert_called_once_with(req.remote_addr,
req.method,
req.path_info,
req.query_string,
req.body)
def test_no_argument_passed_to_init(self):
with testtools.ExpectedException(
exceptions.NetworkIdOrRouterIdRequiredError):
ns_proxy.NetworkMetadataProxyHandler()
def test_call_internal_server_error(self):
req = mock.Mock(headers={})
with mock.patch.object(self.handler, '_proxy_request') as proxy_req:
proxy_req.side_effect = Exception
retval = self.handler(req)
self.assertIsInstance(retval, webob.exc.HTTPInternalServerError)
self.assertEqual(len(self.log.mock_calls), 2)
self.assertTrue(proxy_req.called)
def test_proxy_request_router_200(self):
self.handler.router_id = 'router_id'
resp = mock.MagicMock(status=200)
with mock.patch('httplib2.Http') as mock_http:
resp.__getitem__.return_value = "text/plain"
mock_http.return_value.request.return_value = (resp, 'content')
retval = self.handler._proxy_request('192.168.1.1',
'GET',
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method='GET',
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Router-ID': 'router_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
self.assertEqual(retval.headers['Content-Type'], 'text/plain')
self.assertEqual(retval.body, 'content')
def test_proxy_request_network_200(self):
self.handler.network_id = 'network_id'
resp = mock.MagicMock(status=200)
with mock.patch('httplib2.Http') as mock_http:
resp.__getitem__.return_value = "application/json"
mock_http.return_value.request.return_value = (resp, '{}')
retval = self.handler._proxy_request('192.168.1.1',
'GET',
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method='GET',
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Network-ID': 'network_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
self.assertEqual(retval.headers['Content-Type'],
'application/json')
self.assertEqual(retval.body, '{}')
def _test_proxy_request_network_4xx(self, status, method, expected):
self.handler.network_id = 'network_id'
resp = mock.Mock(status=status)
with mock.patch('httplib2.Http') as mock_http:
mock_http.return_value.request.return_value = (resp, '')
retval = self.handler._proxy_request('192.168.1.1',
method,
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method=method,
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Network-ID': 'network_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
self.assertIsInstance(retval, expected)
def test_proxy_request_network_400(self):
self._test_proxy_request_network_4xx(
400, 'GET', webob.exc.HTTPBadRequest)
def test_proxy_request_network_404(self):
self._test_proxy_request_network_4xx(
404, 'GET', webob.exc.HTTPNotFound)
def test_proxy_request_network_409(self):
self._test_proxy_request_network_4xx(
409, 'POST', webob.exc.HTTPConflict)
def test_proxy_request_network_500(self):
self.handler.network_id = 'network_id'
resp = mock.Mock(status=500)
with mock.patch('httplib2.Http') as mock_http:
mock_http.return_value.request.return_value = (resp, '')
retval = self.handler._proxy_request('192.168.1.1',
'GET',
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method='GET',
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Network-ID': 'network_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
self.assertIsInstance(retval, webob.exc.HTTPInternalServerError)
def test_proxy_request_network_418(self):
self.handler.network_id = 'network_id'
resp = mock.Mock(status=418)
with mock.patch('httplib2.Http') as mock_http:
mock_http.return_value.request.return_value = (resp, '')
with testtools.ExpectedException(Exception):
self.handler._proxy_request('192.168.1.1',
'GET',
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method='GET',
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Network-ID': 'network_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
def test_proxy_request_network_exception(self):
self.handler.network_id = 'network_id'
mock.Mock(status=500)
with mock.patch('httplib2.Http') as mock_http:
mock_http.return_value.request.side_effect = Exception
with testtools.ExpectedException(Exception):
self.handler._proxy_request('192.168.1.1',
'GET',
'/latest/meta-data',
'',
'')
mock_http.assert_has_calls([
mock.call().request(
'http://169.254.169.254/latest/meta-data',
method='GET',
headers={
'X-Forwarded-For': '192.168.1.1',
'X-Neutron-Network-ID': 'network_id'
},
connection_type=agent_utils.UnixDomainHTTPConnection,
body=''
)]
)
class TestProxyDaemon(base.BaseTestCase):
def test_init(self):
with mock.patch('neutron.agent.linux.daemon.Pidfile'):
pd = ns_proxy.ProxyDaemon('pidfile', 9697, 'net_id', 'router_id')
self.assertEqual(pd.router_id, 'router_id')
self.assertEqual(pd.network_id, 'net_id')
def test_run(self):
with mock.patch('neutron.agent.linux.daemon.Pidfile'):
with mock.patch('neutron.wsgi.Server') as Server:
pd = ns_proxy.ProxyDaemon('pidfile', 9697, 'net_id',
'router_id')
pd.run()
Server.assert_has_calls([
mock.call('neutron-network-metadata-proxy'),
mock.call().start(mock.ANY, 9697),
mock.call().wait()]
)
def test_main(self):
with mock.patch.object(ns_proxy, 'ProxyDaemon') as daemon:
with mock.patch.object(ns_proxy, 'config') as config:
with mock.patch.object(ns_proxy, 'cfg') as cfg:
with mock.patch.object(utils, 'cfg') as utils_cfg:
cfg.CONF.router_id = 'router_id'
cfg.CONF.network_id = None
cfg.CONF.metadata_port = 9697
cfg.CONF.pid_file = 'pidfile'
cfg.CONF.daemonize = True
utils_cfg.CONF.log_opt_values.return_value = None
ns_proxy.main()
self.assertTrue(config.setup_logging.called)
daemon.assert_has_calls([
mock.call('pidfile', 9697,
router_id='router_id',
network_id=None,
user=mock.ANY,
group=mock.ANY,
watch_log=mock.ANY),
mock.call().start()]
)
def test_main_dont_fork(self):
with mock.patch.object(ns_proxy, 'ProxyDaemon') as daemon:
with mock.patch.object(ns_proxy, 'config') as config:
with mock.patch.object(ns_proxy, 'cfg') as cfg:
with mock.patch.object(utils, 'cfg') as utils_cfg:
cfg.CONF.router_id = 'router_id'
cfg.CONF.network_id = None
cfg.CONF.metadata_port = 9697
cfg.CONF.pid_file = 'pidfile'
cfg.CONF.daemonize = False
utils_cfg.CONF.log_opt_values.return_value = None
ns_proxy.main()
self.assertTrue(config.setup_logging.called)
daemon.assert_has_calls([
mock.call('pidfile', 9697,
router_id='router_id',
network_id=None,
user=mock.ANY,
group=mock.ANY,
watch_log=mock.ANY),
mock.call().run()]
)
|
ff94315/hiwifi-openwrt-HC5661-HC5761
|
refs/heads/master
|
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/root-ralink/usr/lib/python2.7/Cookie.py
|
65
|
#!/usr/bin/env python
#
####
# Copyright 2000 by Timothy O'Malley <[email protected]>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <[email protected]>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell ([email protected]) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy..
>>> import Cookie
Most of the time you start by creating a cookie. Cookies come in
three flavors, each with slightly different encoding semantics, but
more on that later.
>>> C = Cookie.SimpleCookie()
>>> C = Cookie.SerialCookie()
>>> C = Cookie.SmartCookie()
[Note: Long-time users of Cookie.py will remember using
Cookie.Cookie() to create a Cookie object. Although deprecated, it
is still supported by the code. See the Backward Compatibility notes
for more information.]
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = Cookie.SmartCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = Cookie.SmartCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print C.output(header="Cookie:")
Cookie: rocky=road; Path=/cookie
>>> print C.output(attrs=[], header="Cookie:")
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = Cookie.SmartCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = Cookie.SmartCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print C
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = Cookie.SmartCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print C
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = Cookie.SmartCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
A Bit More Advanced
-------------------
As mentioned before, there are three different flavors of Cookie
objects, each with different encoding/decoding semantics. This
section briefly discusses the differences.
SimpleCookie
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = Cookie.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
SerialCookie
The SerialCookie expects that all values should be serialized using
cPickle (or pickle, if cPickle isn't available). As a result of
serializing, SerialCookie can save almost any Python object to a
value, and recover the exact same object when the cookie has been
returned. (SerialCookie can yield some strange-looking cookie
values, however.)
>>> C = Cookie.SerialCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
7
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string="S\'seven\'\\012p1\\012."'
Be warned, however, if SerialCookie cannot de-serialize a value (because
it isn't a valid pickle'd object), IT WILL RAISE AN EXCEPTION.
SmartCookie
The SmartCookie combines aspects of each of the other two flavors.
When setting a value in a dictionary-fashion, the SmartCookie will
serialize (ala cPickle) the value *if and only if* it isn't a
Python string. String objects are *not* serialized. Similarly,
when the load() method parses out values, it attempts to de-serialize
the value. If it fails, then it falls back to treating the value
as a string.
>>> C = Cookie.SmartCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
7
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string=seven'
Backwards Compatibility
-----------------------
In order to keep compatibility with earlier versions of Cookie.py,
it is still possible to use Cookie.Cookie() to create a Cookie. In
fact, this simply returns a SmartCookie.
>>> C = Cookie.Cookie()
>>> print C.__class__.__name__
SmartCookie
Finis.
""" #"
# ^
# |----helps out font-lock
#
# Import our required modules
#
import string
try:
from cPickle import dumps, loads
except ImportError:
from pickle import dumps, loads
import re, warnings
__all__ = ["CookieError","BaseCookie","SimpleCookie","SerialCookie",
"SmartCookie","Cookie"]
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
#
# Define an exception visible to External modules
#
class CookieError(Exception):
pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a backslash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceding backslash.
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~"
_Translator = {
'\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
'\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
'\006' : '\\006', '\007' : '\\007', '\010' : '\\010',
'\011' : '\\011', '\012' : '\\012', '\013' : '\\013',
'\014' : '\\014', '\015' : '\\015', '\016' : '\\016',
'\017' : '\\017', '\020' : '\\020', '\021' : '\\021',
'\022' : '\\022', '\023' : '\\023', '\024' : '\\024',
'\025' : '\\025', '\026' : '\\026', '\027' : '\\027',
'\030' : '\\030', '\031' : '\\031', '\032' : '\\032',
'\033' : '\\033', '\034' : '\\034', '\035' : '\\035',
'\036' : '\\036', '\037' : '\\037',
# Because of the way browsers really handle cookies (as opposed
# to what the RFC says) we also encode , and ;
',' : '\\054', ';' : '\\073',
'"' : '\\"', '\\' : '\\\\',
'\177' : '\\177', '\200' : '\\200', '\201' : '\\201',
'\202' : '\\202', '\203' : '\\203', '\204' : '\\204',
'\205' : '\\205', '\206' : '\\206', '\207' : '\\207',
'\210' : '\\210', '\211' : '\\211', '\212' : '\\212',
'\213' : '\\213', '\214' : '\\214', '\215' : '\\215',
'\216' : '\\216', '\217' : '\\217', '\220' : '\\220',
'\221' : '\\221', '\222' : '\\222', '\223' : '\\223',
'\224' : '\\224', '\225' : '\\225', '\226' : '\\226',
'\227' : '\\227', '\230' : '\\230', '\231' : '\\231',
'\232' : '\\232', '\233' : '\\233', '\234' : '\\234',
'\235' : '\\235', '\236' : '\\236', '\237' : '\\237',
'\240' : '\\240', '\241' : '\\241', '\242' : '\\242',
'\243' : '\\243', '\244' : '\\244', '\245' : '\\245',
'\246' : '\\246', '\247' : '\\247', '\250' : '\\250',
'\251' : '\\251', '\252' : '\\252', '\253' : '\\253',
'\254' : '\\254', '\255' : '\\255', '\256' : '\\256',
'\257' : '\\257', '\260' : '\\260', '\261' : '\\261',
'\262' : '\\262', '\263' : '\\263', '\264' : '\\264',
'\265' : '\\265', '\266' : '\\266', '\267' : '\\267',
'\270' : '\\270', '\271' : '\\271', '\272' : '\\272',
'\273' : '\\273', '\274' : '\\274', '\275' : '\\275',
'\276' : '\\276', '\277' : '\\277', '\300' : '\\300',
'\301' : '\\301', '\302' : '\\302', '\303' : '\\303',
'\304' : '\\304', '\305' : '\\305', '\306' : '\\306',
'\307' : '\\307', '\310' : '\\310', '\311' : '\\311',
'\312' : '\\312', '\313' : '\\313', '\314' : '\\314',
'\315' : '\\315', '\316' : '\\316', '\317' : '\\317',
'\320' : '\\320', '\321' : '\\321', '\322' : '\\322',
'\323' : '\\323', '\324' : '\\324', '\325' : '\\325',
'\326' : '\\326', '\327' : '\\327', '\330' : '\\330',
'\331' : '\\331', '\332' : '\\332', '\333' : '\\333',
'\334' : '\\334', '\335' : '\\335', '\336' : '\\336',
'\337' : '\\337', '\340' : '\\340', '\341' : '\\341',
'\342' : '\\342', '\343' : '\\343', '\344' : '\\344',
'\345' : '\\345', '\346' : '\\346', '\347' : '\\347',
'\350' : '\\350', '\351' : '\\351', '\352' : '\\352',
'\353' : '\\353', '\354' : '\\354', '\355' : '\\355',
'\356' : '\\356', '\357' : '\\357', '\360' : '\\360',
'\361' : '\\361', '\362' : '\\362', '\363' : '\\363',
'\364' : '\\364', '\365' : '\\365', '\366' : '\\366',
'\367' : '\\367', '\370' : '\\370', '\371' : '\\371',
'\372' : '\\372', '\373' : '\\373', '\374' : '\\374',
'\375' : '\\375', '\376' : '\\376', '\377' : '\\377'
}
_idmap = ''.join(chr(x) for x in xrange(256))
def _quote(str, LegalChars=_LegalChars,
idmap=_idmap, translate=string.translate):
#
    # If the string does not need to be double-quoted,
    # then just return the string. Otherwise, surround
    # the string in double quotes and backslash-escape
    # the special characters.
#
if "" == translate(str, idmap, LegalChars):
return str
else:
return '"' + _nulljoin( map(_Translator.get, str, str) ) + '"'
# end _quote
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
_QuotePatt = re.compile(r"[\\].")
def _unquote(str):
# If there aren't any doublequotes,
# then there can't be any special characters. See RFC 2109.
if len(str) < 2:
return str
if str[0] != '"' or str[-1] != '"':
return str
# We have to assume that we must decode this string.
# Down to work.
# Remove the "s
str = str[1:-1]
# Check for special sequences. Examples:
# \012 --> \n
# \" --> "
#
i = 0
n = len(str)
res = []
while 0 <= i < n:
Omatch = _OctalPatt.search(str, i)
Qmatch = _QuotePatt.search(str, i)
if not Omatch and not Qmatch: # Neither matched
res.append(str[i:])
break
# else:
j = k = -1
if Omatch: j = Omatch.start(0)
if Qmatch: k = Qmatch.start(0)
if Qmatch and ( not Omatch or k < j ): # QuotePatt matched
res.append(str[i:k])
res.append(str[k+1])
i = k+2
else: # OctalPatt matched
res.append(str[i:j])
res.append( chr( int(str[j+1:j+4], 8) ) )
i = j+4
return _nulljoin(res)
# end _unquote
# The _getdate() routine is used to set the expiration time in
# the cookie's HTTP header. By default, _getdate() returns the
# current time in the appropriate "expires" format for a
# Set-Cookie header. The one optional argument is an offset from
# now, in seconds. For example, an offset of -3600 means "one hour ago".
# The offset may be a floating point number.
#
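#
# For example (a sketch; the exact value depends on the current time):
#   _getdate(0)     -> 'Thu, 01-Jan-2015 00:00:00 GMT'   (format only)
#   _getdate(-3600) -> the same format, one hour earlier
#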
_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
from time import gmtime, time
now = time()
year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \
(weekdayname[wd], day, monthname[month], year, hh, mm, ss)
#
# A class to hold ONE key,value pair.
# In a cookie, each such pair may have several attributes.
# so this class is used to keep the attributes associated
# with the appropriate key,value pair.
# This class also includes a coded_value attribute, which
# is used to hold the network representation of the
# value. This is most useful when Python objects are
# pickled for network transit.
#
class Morsel(dict):
# RFC 2109 lists these attributes as reserved:
# path comment domain
# max-age secure version
#
# For historical reasons, these attributes are also reserved:
# expires
#
# This is an extension from Microsoft:
# httponly
#
# This dictionary provides a mapping from the lowercase
# variant on the left to the appropriate traditional
# formatting on the right.
_reserved = { "expires" : "expires",
"path" : "Path",
"comment" : "Comment",
"domain" : "Domain",
"max-age" : "Max-Age",
"secure" : "secure",
"httponly" : "httponly",
"version" : "Version",
}
def __init__(self):
# Set defaults
self.key = self.value = self.coded_value = None
# Set default attributes
for K in self._reserved:
dict.__setitem__(self, K, "")
# end __init__
def __setitem__(self, K, V):
K = K.lower()
        if K not in self._reserved:
raise CookieError("Invalid Attribute %s" % K)
dict.__setitem__(self, K, V)
# end __setitem__
def isReservedKey(self, K):
return K.lower() in self._reserved
# end isReservedKey
def set(self, key, val, coded_val,
LegalChars=_LegalChars,
idmap=_idmap, translate=string.translate):
# First we verify that the key isn't a reserved word
# Second we make sure it only contains legal characters
if key.lower() in self._reserved:
raise CookieError("Attempt to set a reserved key: %s" % key)
if "" != translate(key, idmap, LegalChars):
raise CookieError("Illegal key value: %s" % key)
# It's a good key, so save it.
self.key = key
self.value = val
self.coded_value = coded_val
# end set
def output(self, attrs=None, header = "Set-Cookie:"):
return "%s %s" % ( header, self.OutputString(attrs) )
__str__ = output
def __repr__(self):
return '<%s: %s=%s>' % (self.__class__.__name__,
self.key, repr(self.value) )
def js_output(self, attrs=None):
        # Return a snippet of JavaScript (in a <script> tag) that sets this cookie
return """
<script type="text/javascript">
<!-- begin hiding
document.cookie = \"%s\";
// end hiding -->
</script>
""" % ( self.OutputString(attrs).replace('"',r'\"'), )
# end js_output()
def OutputString(self, attrs=None):
# Build up our result
#
result = []
RA = result.append
# First, the key=value pair
RA("%s=%s" % (self.key, self.coded_value))
# Now add any defined attributes
if attrs is None:
attrs = self._reserved
items = self.items()
items.sort()
for K,V in items:
if V == "": continue
if K not in attrs: continue
if K == "expires" and type(V) == type(1):
RA("%s=%s" % (self._reserved[K], _getdate(V)))
elif K == "max-age" and type(V) == type(1):
RA("%s=%d" % (self._reserved[K], V))
elif K == "secure":
RA(str(self._reserved[K]))
elif K == "httponly":
RA(str(self._reserved[K]))
else:
RA("%s=%s" % (self._reserved[K], V))
# Return the result
return _semispacejoin(result)
# end OutputString
# end Morsel class
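#
# A minimal Morsel sketch (consistent with the methods above):
#   >>> m = Morsel()
#   >>> m.set("sid", "abc", "abc")
#   >>> m["path"] = "/"
#   >>> m.output()
#   'Set-Cookie: sid=abc; Path=/'
#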
#
# Pattern for finding a cookie
#
# This used to be strict parsing based on the RFC2109 and RFC2068
# specifications. I have since discovered that MSIE 3.0x doesn't
# follow the character rules outlined in those specs. As a
# result, the parsing rules here are less strict.
#
_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
_CookiePattern = re.compile(
r"(?x)" # This is a Verbose pattern
r"(?P<key>" # Start of group 'key'
""+ _LegalCharsPatt +"+?" # Any word of at least one letter, nongreedy
r")" # End of group 'key'
r"\s*=\s*" # Equal Sign
r"(?P<val>" # Start of group 'val'
r'"(?:[^\\"]|\\.)*"' # Any doublequoted string
r"|" # or
r"\w{3},\s[\w\d-]{9,11}\s[\d:]{8}\sGMT" # Special case for "expires" attr
r"|" # or
""+ _LegalCharsPatt +"*" # Any word or empty string
r")" # End of group 'val'
r"\s*;?" # Probably ending in a semi-colon
)
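#
# For illustration, a hedged sketch of how the pattern tokenizes a header
# (repeated searches from the end of each match pick up 'Path' / '/'):
#   _CookiePattern.search('sid=abc; Path=/').group('key', 'val')
#   -> ('sid', 'abc')
#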
# At long last, here is the cookie class.
# Using this class is almost just like using a dictionary.
# See this module's docstring for example usage.
#
class BaseCookie(dict):
# A container class for a set of Morsels
#
def value_decode(self, val):
"""real_value, coded_value = value_decode(STRING)
Called prior to setting a cookie's value from the network
representation. The VALUE is the value read from HTTP
header.
Override this function to modify the behavior of cookies.
"""
return val, val
    # end value_decode
def value_encode(self, val):
"""real_value, coded_value = value_encode(VALUE)
Called prior to setting a cookie's value from the dictionary
representation. The VALUE is the value being assigned.
Override this function to modify the behavior of cookies.
"""
strval = str(val)
return strval, strval
# end value_encode
def __init__(self, input=None):
if input: self.load(input)
# end __init__
def __set(self, key, real_value, coded_value):
"""Private method for setting a cookie's value"""
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
# end __set
def __setitem__(self, key, value):
"""Dictionary style assignment."""
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
# end __setitem__
def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
"""Return a string suitable for HTTP."""
result = []
items = self.items()
items.sort()
for K,V in items:
result.append( V.output(attrs, header) )
return sep.join(result)
# end output
__str__ = output
def __repr__(self):
L = []
items = self.items()
items.sort()
for K,V in items:
L.append( '%s=%s' % (K,repr(V.value) ) )
return '<%s: %s>' % (self.__class__.__name__, _spacejoin(L))
def js_output(self, attrs=None):
"""Return a string suitable for JavaScript."""
result = []
items = self.items()
items.sort()
for K,V in items:
result.append( V.js_output(attrs) )
return _nulljoin(result)
# end js_output
def load(self, rawdata):
"""Load cookies from a string (presumably HTTP_COOKIE) or
from a dictionary. Loading cookies from a dictionary 'd'
is equivalent to calling:
map(Cookie.__setitem__, d.keys(), d.values())
"""
if type(rawdata) == type(""):
self.__ParseString(rawdata)
else:
# self.update() wouldn't call our custom __setitem__
for k, v in rawdata.items():
self[k] = v
return
# end load()
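    # A short load() sketch (in this base class, values pass through
    # value_decode unchanged):
    #   >>> C = BaseCookie()
    #   >>> C.load("sid=abc; token=xyz")
    #   >>> C["sid"].value
    #   'abc'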
def __ParseString(self, str, patt=_CookiePattern):
i = 0 # Our starting point
n = len(str) # Length of string
M = None # current morsel
while 0 <= i < n:
# Start looking for a cookie
match = patt.search(str, i)
if not match: break # No more cookies
K,V = match.group("key"), match.group("val")
i = match.end(0)
# Parse the key, value in case it's metainfo
if K[0] == "$":
# We ignore attributes which pertain to the cookie
# mechanism as a whole. See RFC 2109.
# (Does anyone care?)
if M:
M[ K[1:] ] = V
elif K.lower() in Morsel._reserved:
if M:
M[ K ] = _unquote(V)
else:
rval, cval = self.value_decode(V)
self.__set(K, rval, cval)
M = self[K]
# end __ParseString
# end BaseCookie class
class SimpleCookie(BaseCookie):
"""SimpleCookie
SimpleCookie supports strings as cookie values. When setting
the value using the dictionary assignment notation, SimpleCookie
calls the builtin str() to convert the value to a string. Values
received from HTTP are kept as strings.
"""
def value_decode(self, val):
return _unquote( val ), val
def value_encode(self, val):
strval = str(val)
return strval, _quote( strval )
# end SimpleCookie
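#
# A short SimpleCookie sketch (quoting kicks in only for non-legal chars):
#   >>> C = SimpleCookie()
#   >>> C["sid"] = "abc def"
#   >>> C.output()
#   'Set-Cookie: sid="abc def"'
#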
class SerialCookie(BaseCookie):
"""SerialCookie
SerialCookie supports arbitrary objects as cookie values. All
values are serialized (using cPickle) before being sent to the
client. All incoming values are assumed to be valid Pickle
representations. IF AN INCOMING VALUE IS NOT IN A VALID PICKLE
FORMAT, THEN AN EXCEPTION WILL BE RAISED.
Note: Large cookie values add overhead because they must be
retransmitted on every HTTP transaction.
Note: HTTP has a 2k limit on the size of a cookie. This class
does not check for this limit, so be careful!!!
"""
def __init__(self, input=None):
warnings.warn("SerialCookie class is insecure; do not use it",
DeprecationWarning)
BaseCookie.__init__(self, input)
# end __init__
def value_decode(self, val):
# This could raise an exception!
return loads( _unquote(val) ), val
def value_encode(self, val):
return val, _quote( dumps(val) )
# end SerialCookie
class SmartCookie(BaseCookie):
"""SmartCookie
SmartCookie supports arbitrary objects as cookie values. If the
object is a string, then it is quoted. If the object is not a
string, however, then SmartCookie will use cPickle to serialize
the object into a string representation.
Note: Large cookie values add overhead because they must be
retransmitted on every HTTP transaction.
Note: HTTP has a 2k limit on the size of a cookie. This class
does not check for this limit, so be careful!!!
"""
def __init__(self, input=None):
warnings.warn("Cookie/SmartCookie class is insecure; do not use it",
DeprecationWarning)
BaseCookie.__init__(self, input)
# end __init__
def value_decode(self, val):
strval = _unquote(val)
try:
return loads(strval), val
except:
return strval, val
def value_encode(self, val):
if type(val) == type(""):
return val, _quote(val)
else:
return val, _quote( dumps(val) )
# end SmartCookie
###########################################################
# Backwards Compatibility: Don't break any existing code!
# We provide Cookie() as an alias for SmartCookie()
Cookie = SmartCookie
#
###########################################################
def _test():
import doctest, Cookie
return doctest.testmod(Cookie)
if __name__ == "__main__":
_test()
#Local Variables:
#tab-width: 4
#end:
|
CYBAI/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/wpt.py
|
24
|
# This file exists to allow `python wpt <command>` to work on Windows:
# https://github.com/web-platform-tests/wpt/pull/6907 and
# https://github.com/web-platform-tests/wpt/issues/23095
import os
abspath = os.path.abspath(__file__)
os.chdir(os.path.dirname(abspath))
exec(compile(open("wpt", "r").read(), "wpt", 'exec'))
|
micbou/ycmd
|
refs/heads/master
|
ycmd/tests/extra_conf_store_test.py
|
4
|
# Copyright (C) 2016-2018 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Not installing aliases from python-future; it's unreliable and slow.
from builtins import * # noqa
import inspect
from mock import patch
from hamcrest import ( assert_that, calling, equal_to, has_length, has_property,
none, raises, same_instance )
from ycmd import extra_conf_store
from ycmd.responses import UnknownExtraConf
from ycmd.tests import IsolatedYcmd, PathToTestFile
from ycmd.tests.test_utils import TemporarySymlink, UnixOnly, WindowsOnly
GLOBAL_EXTRA_CONF = PathToTestFile( 'extra_conf', 'global_extra_conf.py' )
ERRONEOUS_EXTRA_CONF = PathToTestFile( 'extra_conf', 'erroneous_extra_conf.py' )
NO_EXTRA_CONF = PathToTestFile( 'extra_conf', 'no_extra_conf.py' )
PROJECT_EXTRA_CONF = PathToTestFile( 'extra_conf', 'project',
'.ycm_extra_conf.py' )
@IsolatedYcmd()
def ExtraConfStore_ModuleForSourceFile_UnknownExtraConf_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
assert_that(
calling( extra_conf_store.ModuleForSourceFile ).with_args( filename ),
raises( UnknownExtraConf, 'Found .*\\.ycm_extra_conf\\.py\\. Load?' )
)
@IsolatedYcmd( { 'confirm_extra_conf': 0 } )
def ExtraConfStore_ModuleForSourceFile_NoConfirmation_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
@IsolatedYcmd( { 'extra_conf_globlist': [ PROJECT_EXTRA_CONF ] } )
def ExtraConfStore_ModuleForSourceFile_Whitelisted_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
@IsolatedYcmd( { 'extra_conf_globlist': [ '!' + PROJECT_EXTRA_CONF ] } )
def ExtraConfStore_ModuleForSourceFile_Blacklisted_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
assert_that( extra_conf_store.ModuleForSourceFile( filename ), none() )
@patch.dict( 'os.environ', { 'YCMD_TEST': PROJECT_EXTRA_CONF } )
@IsolatedYcmd( { 'extra_conf_globlist': [ '$YCMD_TEST' ] } )
def ExtraConfStore_ModuleForSourceFile_UnixVarEnv_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
@WindowsOnly
@patch.dict( 'os.environ', { 'YCMD_TEST': PROJECT_EXTRA_CONF } )
@IsolatedYcmd( { 'extra_conf_globlist': [ '%YCMD_TEST%' ] } )
def ExtraConfStore_ModuleForSourceFile_WinVarEnv_test( app ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
@UnixOnly
@IsolatedYcmd( { 'extra_conf_globlist': [
PathToTestFile( 'extra_conf', 'symlink', '*' ) ] } )
def ExtraConfStore_ModuleForSourceFile_SupportSymlink_test( app ):
with TemporarySymlink( PathToTestFile( 'extra_conf', 'project' ),
PathToTestFile( 'extra_conf', 'symlink' ) ):
filename = PathToTestFile( 'extra_conf', 'project', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
@IsolatedYcmd( { 'global_ycm_extra_conf': GLOBAL_EXTRA_CONF } )
def ExtraConfStore_ModuleForSourceFile_GlobalExtraConf_test( app ):
filename = PathToTestFile( 'extra_conf', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( GLOBAL_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( True ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( True ) )
@patch.dict( 'os.environ', { 'YCMD_TEST': GLOBAL_EXTRA_CONF } )
@IsolatedYcmd( { 'global_ycm_extra_conf': '$YCMD_TEST' } )
def ExtraConfStore_ModuleForSourceFile_GlobalExtraConf_UnixEnvVar_test( app ):
filename = PathToTestFile( 'extra_conf', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( GLOBAL_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( True ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( True ) )
@WindowsOnly
@patch.dict( 'os.environ', { 'YCMD_TEST': GLOBAL_EXTRA_CONF } )
@IsolatedYcmd( { 'global_ycm_extra_conf': '%YCMD_TEST%' } )
def ExtraConfStore_ModuleForSourceFile_GlobalExtraConf_WinEnvVar_test( app ):
filename = PathToTestFile( 'extra_conf', 'some_file' )
module = extra_conf_store.ModuleForSourceFile( filename )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( GLOBAL_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( True ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( True ) )
@IsolatedYcmd( { 'global_ycm_extra_conf': NO_EXTRA_CONF } )
@patch( 'ycmd.extra_conf_store.LOGGER', autospec = True )
def ExtraConfStore_CallGlobalExtraConfMethod_NoGlobalExtraConf_test( app,
logger ):
extra_conf_store._CallGlobalExtraConfMethod( 'SomeMethod' )
assert_that( logger.method_calls, has_length( 1 ) )
logger.debug.assert_called_with(
'No global extra conf, not calling method %s',
'SomeMethod' )
@IsolatedYcmd( { 'global_ycm_extra_conf': GLOBAL_EXTRA_CONF } )
@patch( 'ycmd.extra_conf_store.LOGGER', autospec = True )
def CallGlobalExtraConfMethod_NoMethodInGlobalExtraConf_test( app, logger ):
extra_conf_store._CallGlobalExtraConfMethod( 'MissingMethod' )
assert_that( logger.method_calls, has_length( 1 ) )
logger.debug.assert_called_with(
'Global extra conf not loaded or no function %s',
'MissingMethod' )
@IsolatedYcmd( { 'global_ycm_extra_conf': GLOBAL_EXTRA_CONF } )
@patch( 'ycmd.extra_conf_store.LOGGER', autospec = True )
def CallGlobalExtraConfMethod_NoExceptionFromMethod_test( app, logger ):
extra_conf_store._CallGlobalExtraConfMethod( 'NoException' )
assert_that( logger.method_calls, has_length( 1 ) )
logger.info.assert_called_with(
'Calling global extra conf method %s on conf file %s',
'NoException',
GLOBAL_EXTRA_CONF )
@IsolatedYcmd( { 'global_ycm_extra_conf': GLOBAL_EXTRA_CONF } )
@patch( 'ycmd.extra_conf_store.LOGGER', autospec = True )
def CallGlobalExtraConfMethod_CatchExceptionFromMethod_test( app, logger ):
extra_conf_store._CallGlobalExtraConfMethod( 'RaiseException' )
assert_that( logger.method_calls, has_length( 2 ) )
logger.info.assert_called_with(
'Calling global extra conf method %s on conf file %s',
'RaiseException',
GLOBAL_EXTRA_CONF )
logger.exception.assert_called_with(
'Error occurred while calling global extra conf method %s on conf file %s',
'RaiseException',
GLOBAL_EXTRA_CONF )
@IsolatedYcmd( { 'global_ycm_extra_conf': ERRONEOUS_EXTRA_CONF } )
@patch( 'ycmd.extra_conf_store.LOGGER', autospec = True )
def CallGlobalExtraConfMethod_CatchExceptionFromExtraConf_test( app, logger ):
extra_conf_store._CallGlobalExtraConfMethod( 'NoException' )
assert_that( logger.method_calls, has_length( 1 ) )
logger.exception.assert_called_with(
'Error occurred while loading global extra conf %s',
ERRONEOUS_EXTRA_CONF )
@IsolatedYcmd()
def Load_DoNotReloadExtraConf_NoForce_test( app ):
with patch( 'ycmd.extra_conf_store._ShouldLoad', return_value = True ):
module = extra_conf_store.Load( PROJECT_EXTRA_CONF )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
assert_that(
extra_conf_store.Load( PROJECT_EXTRA_CONF ),
same_instance( module )
)
@IsolatedYcmd()
def Load_DoNotReloadExtraConf_ForceEqualsTrue_test( app ):
with patch( 'ycmd.extra_conf_store._ShouldLoad', return_value = True ):
module = extra_conf_store.Load( PROJECT_EXTRA_CONF )
assert_that( inspect.ismodule( module ) )
assert_that( inspect.getfile( module ), equal_to( PROJECT_EXTRA_CONF ) )
assert_that( module, has_property( 'is_global_ycm_extra_conf' ) )
assert_that( module.is_global_ycm_extra_conf, equal_to( False ) )
assert_that( extra_conf_store.IsGlobalExtraConfModule( module ),
equal_to( False ) )
assert_that(
extra_conf_store.Load( PROJECT_EXTRA_CONF, force = True ),
same_instance( module )
)
def ExtraConfStore_IsGlobalExtraConfStore_NotAExtraConf_test():
assert_that( calling( extra_conf_store.IsGlobalExtraConfModule ).with_args(
extra_conf_store ), raises( AttributeError ) )
|
ropik/chromium
|
refs/heads/master
|
tools/json_schema_compiler/json_schema.py
|
5
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import json
import os.path
import sys
_script_path = os.path.realpath(__file__)
sys.path.insert(0, os.path.normpath(_script_path + "/../../"))
import json_comment_eater
def DeleteNocompileNodes(item):
def HasNocompile(thing):
return type(thing) == dict and thing.get('nocompile', False)
if type(item) == dict:
toDelete = []
for key, value in item.items():
if HasNocompile(value):
toDelete.append(key)
else:
DeleteNocompileNodes(value)
for key in toDelete:
del item[key]
elif type(item) == list:
item[:] = [DeleteNocompileNodes(thing)
for thing in item if not HasNocompile(thing)]
return item
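# For illustration, a hedged sketch of the transformation performed by
# DeleteNocompileNodes (the dict literals here are invented):
#   DeleteNocompileNodes({'keep': 1, 'drop': {'nocompile': True}})
#   -> {'keep': 1}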
def Load(filename):
with open(filename, 'r') as handle:
return DeleteNocompileNodes(
json.loads(json_comment_eater.Nom(handle.read())))
# A dictionary mapping |filename| to the object resulting from loading the JSON
# at |filename|.
_cache = {}
def CachedLoad(filename):
"""Equivalent to Load(filename), but caches results for subsequent calls"""
if filename not in _cache:
_cache[filename] = Load(filename)
# Return a copy of the object so that any changes a caller makes won't affect
# the next caller.
return copy.deepcopy(_cache[filename])
|
Gui13/CouchPotatoServer
|
refs/heads/master
|
couchpotato/core/media/movie/providers/metadata/mediabrowser.py
|
75
|
import os
from couchpotato.core.media.movie.providers.metadata.base import MovieMetaData
autoload = 'MediaBrowser'
class MediaBrowser(MovieMetaData):
def getThumbnailName(self, name, root, i):
return os.path.join(root, 'folder.jpg')
def getFanartName(self, name, root, i):
return os.path.join(root, 'backdrop.jpg')
config = [{
'name': 'mediabrowser',
'groups': [
{
'tab': 'renamer',
'subtab': 'metadata',
'name': 'mediabrowser_metadata',
'label': 'MediaBrowser',
'description': 'Generate folder.jpg and backdrop.jpg',
'options': [
{
'name': 'meta_enabled',
'default': False,
'type': 'enabler',
},
],
},
],
}]
|
nuuuboo/odoo
|
refs/heads/8.0
|
addons/crm_claim/__openerp__.py
|
260
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Claims Management',
'version': '1.0',
'category': 'Customer Relationship Management',
'description': """
Manage Customer Claims.
=======================
This application allows you to track your customers' and suppliers' claims and grievances.
It is fully integrated with the email gateway so that new claims can be
created automatically from incoming emails.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com',
'depends': ['crm'],
'data': [
'crm_claim_view.xml',
'crm_claim_menu.xml',
'security/ir.model.access.csv',
'report/crm_claim_report_view.xml',
'crm_claim_data.xml',
'res_partner_view.xml',
],
'demo': ['crm_claim_demo.xml'],
'test': [
'test/process/claim.yml',
'test/ui/claim_demo.yml'
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
proxysh/Safejumper-for-Mac
|
refs/heads/master
|
buildmac/Resources/env/lib/python2.7/site-packages/twisted/internet/test/test_newtls.py
|
12
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.internet._newtls}.
"""
from __future__ import division, absolute_import
from twisted.trial import unittest
from twisted.internet import interfaces
from twisted.internet.test.reactormixins import ReactorBuilder
from twisted.internet.test.connectionmixins import (
ConnectableProtocol, runProtocolsWithReactor)
from twisted.internet.test.test_tls import SSLCreator, TLSMixin
from twisted.internet.test.test_tls import StartTLSClientCreator
from twisted.internet.test.test_tls import ContextGeneratingMixin
from twisted.internet.test.test_tcp import TCPCreator
try:
from twisted.protocols import tls
from twisted.internet import _newtls
except ImportError:
_newtls = None
from zope.interface import implementer
class BypassTLSTests(unittest.TestCase):
"""
Tests for the L{_newtls._BypassTLS} class.
"""
if not _newtls:
skip = "Couldn't import _newtls, perhaps pyOpenSSL is old or missing"
def test_loseConnectionPassThrough(self):
"""
C{_BypassTLS.loseConnection} calls C{loseConnection} on the base
class, while preserving any default argument in the base class'
C{loseConnection} implementation.
"""
default = object()
result = []
class FakeTransport(object):
def loseConnection(self, _connDone=default):
result.append(_connDone)
bypass = _newtls._BypassTLS(FakeTransport, FakeTransport())
# The default from FakeTransport is used:
bypass.loseConnection()
self.assertEqual(result, [default])
# And we can pass our own:
notDefault = object()
bypass.loseConnection(notDefault)
self.assertEqual(result, [default, notDefault])
class FakeProducer(object):
"""
A producer that does nothing.
"""
def pauseProducing(self):
pass
def resumeProducing(self):
pass
def stopProducing(self):
pass
@implementer(interfaces.IHandshakeListener)
class ProducerProtocol(ConnectableProtocol):
"""
Register a producer, unregister it, and verify the producer hooks up to
innards of C{TLSMemoryBIOProtocol}.
"""
def __init__(self, producer, result):
self.producer = producer
self.result = result
def handshakeCompleted(self):
if not isinstance(self.transport.protocol,
tls.TLSMemoryBIOProtocol):
            # Either the test or the code has a bug...
raise RuntimeError("TLSMemoryBIOProtocol not hooked up.")
self.transport.registerProducer(self.producer, True)
# The producer was registered with the TLSMemoryBIOProtocol:
self.result.append(self.transport.protocol._producer._producer)
self.transport.unregisterProducer()
# The producer was unregistered from the TLSMemoryBIOProtocol:
self.result.append(self.transport.protocol._producer)
self.transport.loseConnection()
class ProducerTestsMixin(ReactorBuilder, TLSMixin, ContextGeneratingMixin):
"""
Test the new TLS code integrates C{TLSMemoryBIOProtocol} correctly.
"""
if not _newtls:
skip = "Could not import twisted.internet._newtls"
def test_producerSSLFromStart(self):
"""
C{registerProducer} and C{unregisterProducer} on TLS transports
created as SSL from the get go are passed to the
C{TLSMemoryBIOProtocol}, not the underlying transport directly.
"""
result = []
producer = FakeProducer()
runProtocolsWithReactor(self, ConnectableProtocol(),
ProducerProtocol(producer, result),
SSLCreator())
self.assertEqual(result, [producer, None])
def test_producerAfterStartTLS(self):
"""
C{registerProducer} and C{unregisterProducer} on TLS transports
created by C{startTLS} are passed to the C{TLSMemoryBIOProtocol}, not
the underlying transport directly.
"""
result = []
producer = FakeProducer()
runProtocolsWithReactor(self, ConnectableProtocol(),
ProducerProtocol(producer, result),
StartTLSClientCreator())
self.assertEqual(result, [producer, None])
def startTLSAfterRegisterProducer(self, streaming):
"""
When a producer is registered, and then startTLS is called,
the producer is re-registered with the C{TLSMemoryBIOProtocol}.
"""
clientContext = self.getClientContext()
serverContext = self.getServerContext()
result = []
producer = FakeProducer()
class RegisterTLSProtocol(ConnectableProtocol):
def connectionMade(self):
self.transport.registerProducer(producer, streaming)
self.transport.startTLS(serverContext)
# Store TLSMemoryBIOProtocol and underlying transport producer
# status:
if streaming:
# _ProducerMembrane -> producer:
result.append(self.transport.protocol._producer._producer)
result.append(self.transport.producer._producer)
else:
# _ProducerMembrane -> _PullToPush -> producer:
result.append(
self.transport.protocol._producer._producer._producer)
result.append(self.transport.producer._producer._producer)
self.transport.unregisterProducer()
self.transport.loseConnection()
class StartTLSProtocol(ConnectableProtocol):
def connectionMade(self):
self.transport.startTLS(clientContext)
runProtocolsWithReactor(self, RegisterTLSProtocol(),
StartTLSProtocol(), TCPCreator())
self.assertEqual(result, [producer, producer])
def test_startTLSAfterRegisterProducerStreaming(self):
"""
When a streaming producer is registered, and then startTLS is called,
the producer is re-registered with the C{TLSMemoryBIOProtocol}.
"""
self.startTLSAfterRegisterProducer(True)
def test_startTLSAfterRegisterProducerNonStreaming(self):
"""
When a non-streaming producer is registered, and then startTLS is
called, the producer is re-registered with the
C{TLSMemoryBIOProtocol}.
"""
self.startTLSAfterRegisterProducer(False)
globals().update(ProducerTestsMixin.makeTestCaseClasses())
|
lishensan/xbmc
|
refs/heads/master
|
tools/Fake Episode Maker/openAnything.py
|
169
|
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 Team XBMC
# http://xbmc.org
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, see
# <http://www.gnu.org/licenses/>.
#
import sys, urllib2, urlparse, gzip
from StringIO import StringIO
USER_AGENT = 'OpenAnything/1.0 +http://diveintopython.org/http_web_services/'
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
def http_error_301(self, req, fp, code, msg, headers):
result = urllib2.HTTPRedirectHandler.http_error_301(
self, req, fp, code, msg, headers)
result.status = code
return result
def http_error_302(self, req, fp, code, msg, headers):
result = urllib2.HTTPRedirectHandler.http_error_302(
self, req, fp, code, msg, headers)
result.status = code
return result
class DefaultErrorHandler(urllib2.HTTPDefaultErrorHandler):
def http_error_default(self, req, fp, code, msg, headers):
result = urllib2.HTTPError(
req.get_full_url(), code, msg, headers, fp)
result.status = code
return result
def openAnything(source, etag=None, lastmodified=None, agent=USER_AGENT):
'''URL, filename, or string --> stream
This function lets you define parsers that take any input source
(URL, pathname to local or network file, or actual data as a string)
and deal with it in a uniform manner. Returned object is guaranteed
to have all the basic stdio read methods (read, readline, readlines).
Just .close() the object when you're done with it.
If the etag argument is supplied, it will be used as the value of an
If-None-Match request header.
If the lastmodified argument is supplied, it must be a formatted
date/time string in GMT (as returned in the Last-Modified header of
a previous request). The formatted date/time will be used
as the value of an If-Modified-Since request header.
If the agent argument is supplied, it will be used as the value of a
User-Agent request header.
'''
if hasattr(source, 'read'):
return source
if source == '-':
return sys.stdin
if urlparse.urlparse(source)[0] == 'http':
# open URL with urllib2
request = urllib2.Request(source)
request.add_header('User-Agent', agent)
if etag:
request.add_header('If-None-Match', etag)
if lastmodified:
request.add_header('If-Modified-Since', lastmodified)
request.add_header('Accept-encoding', 'gzip')
opener = urllib2.build_opener(SmartRedirectHandler(), DefaultErrorHandler())
return opener.open(request)
# try to open with native open function (if source is a filename)
try:
return open(source)
except (IOError, OSError):
pass
# treat source as string
return StringIO(str(source))
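# A minimal usage sketch (the URL is a placeholder):
#   f = openAnything('http://example.com/feed.xml')
#   data = f.read()
#   f.close()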
def fetch(source, etag=None, last_modified=None, agent=USER_AGENT):
'''Fetch data and metadata from a URL, file, stream, or string'''
result = {}
f = openAnything(source, etag, last_modified, agent)
result['data'] = f.read()
if hasattr(f, 'headers'):
# save ETag, if the server sent one
result['etag'] = f.headers.get('ETag')
# save Last-Modified header, if the server sent one
result['lastmodified'] = f.headers.get('Last-Modified')
if f.headers.get('content-encoding', '') == 'gzip':
# data came back gzip-compressed, decompress it
result['data'] = gzip.GzipFile(fileobj=StringIO(result['data'])).read()
if hasattr(f, 'url'):
result['url'] = f.url
result['status'] = 200
if hasattr(f, 'status'):
result['status'] = f.status
f.close()
return result
|
ApuliaSoftware/odoo
|
refs/heads/8.0
|
openerp/workflow/instance.py
|
314
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import workitem
from openerp.workflow.helpers import Session
from openerp.workflow.helpers import Record
from openerp.workflow.workitem import WorkflowItem
class WorkflowInstance(object):
def __init__(self, session, record, values):
assert isinstance(session, Session)
assert isinstance(record, Record)
self.session = session
self.record = record
if not values:
values = {}
assert isinstance(values, dict)
self.instance = values
@classmethod
def create(cls, session, record, workflow_id):
assert isinstance(session, Session)
assert isinstance(record, Record)
assert isinstance(workflow_id, (int, long))
cr = session.cr
cr.execute('insert into wkf_instance (res_type,res_id,uid,wkf_id,state) values (%s,%s,%s,%s,%s) RETURNING id', (record.model, record.id, session.uid, workflow_id, 'active'))
instance_id = cr.fetchone()[0]
cr.execute('select * from wkf_activity where flow_start=True and wkf_id=%s', (workflow_id,))
stack = []
activities = cr.dictfetchall()
for activity in activities:
WorkflowItem.create(session, record, activity, instance_id, stack)
cr.execute('SELECT * FROM wkf_instance WHERE id = %s', (instance_id,))
values = cr.dictfetchone()
wi = WorkflowInstance(session, record, values)
wi.update()
return wi
def delete(self):
self.session.cr.execute('delete from wkf_instance where res_id=%s and res_type=%s', (self.record.id, self.record.model))
def validate(self, signal, force_running=False):
assert isinstance(signal, basestring)
assert isinstance(force_running, bool)
cr = self.session.cr
cr.execute("select * from wkf_workitem where inst_id=%s", (self.instance['id'],))
stack = []
for work_item_values in cr.dictfetchall():
wi = WorkflowItem(self.session, self.record, work_item_values)
wi.process(signal=signal, force_running=force_running, stack=stack)
# An action is returned
self._update_end()
return stack and stack[0] or False
def update(self):
cr = self.session.cr
cr.execute("select * from wkf_workitem where inst_id=%s", (self.instance['id'],))
for work_item_values in cr.dictfetchall():
stack = []
WorkflowItem(self.session, self.record, work_item_values).process(stack=stack)
return self._update_end()
def _update_end(self):
cr = self.session.cr
instance_id = self.instance['id']
cr.execute('select wkf_id from wkf_instance where id=%s', (instance_id,))
wkf_id = cr.fetchone()[0]
cr.execute('select state,flow_stop from wkf_workitem w left join wkf_activity a on (a.id=w.act_id) where w.inst_id=%s', (instance_id,))
ok=True
for r in cr.fetchall():
            if (r[0] != 'complete') or not r[1]:
ok=False
break
if ok:
cr.execute('select distinct a.name from wkf_activity a left join wkf_workitem w on (a.id=w.act_id) where w.inst_id=%s', (instance_id,))
act_names = cr.fetchall()
cr.execute("update wkf_instance set state='complete' where id=%s", (instance_id,))
cr.execute("update wkf_workitem set state='complete' where subflow_id=%s", (instance_id,))
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id IN (select inst_id from wkf_workitem where subflow_id=%s)", (instance_id,))
for cur_instance_id, cur_model_name, cur_record_id in cr.fetchall():
cur_record = Record(cur_model_name, cur_record_id)
for act_name in act_names:
WorkflowInstance(self.session, cur_record, {'id':cur_instance_id}).validate('subflow.%s' % act_name[0])
return ok
# Module-level helpers mirroring the WorkflowInstance methods above.
def create(session, record, workflow_id):
    return WorkflowInstance.create(session, record, workflow_id)
def delete(session, record):
    return WorkflowInstance(session, record, {}).delete()
def validate(session, record, instance_id, signal, force_running=False):
    return WorkflowInstance(session, record, {'id': instance_id}).validate(signal, force_running)
def update(session, record, instance_id):
    return WorkflowInstance(session, record, {'id': instance_id}).update()
def _update_end(session, record, instance_id):
    return WorkflowInstance(session, record, {'id': instance_id})._update_end()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
futuresystems-courses/465-naomi029
|
refs/heads/master
|
naomi_cm_command_hw3ex1.py
|
2
|
which python
python --version
virtualenv -p /usr/bin/python ~/ENV
source $HOME/ENV/bin/activate
pip install cmd3
pip install --trusted-host pypi.python.org cloudmesh_base
pip install --trusted-host pypi.python.org cmd3
cm help
cm-generate-command
cm-generate-command naomicommand --path=~
cd cloudmesh_naomicommand/
python setup.py install
vi ~/.cloudmesh/cmd3.yaml  # edit file to include the command, under modules: "- cloudmesh_naomicommand.plugins"
cm naomicommand
|
githubmlai/numpy
|
refs/heads/master
|
tools/swig/test/testVector.py
|
116
|
#! /usr/bin/env python
from __future__ import division, absolute_import, print_function
# System imports
from distutils.util import get_platform
import os
import sys
import unittest
# Import NumPy
import numpy as np
major, minor = [ int(d) for d in np.__version__.split(".")[:2] ]
if major == 0: BadListError = TypeError
else: BadListError = ValueError
import Vector
######################################################################
class VectorTestCase(unittest.TestCase):
def __init__(self, methodName="runTest"):
unittest.TestCase.__init__(self, methodName)
self.typeStr = "double"
self.typeCode = "d"
# Test the (type IN_ARRAY1[ANY]) typemap
def testLength(self):
"Test length function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertEquals(length([5, 12, 0]), 13)
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthBadList(self):
"Test length function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(BadListError, length, [5, "twelve", 0])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthWrongSize(self):
"Test length function with wrong size"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, [5, 12])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthWrongDim(self):
"Test length function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, [[1, 2], [3, 4]])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthNonContainer(self):
"Test length function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, None)
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProd(self):
"Test prod function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertEquals(prod([1, 2, 3, 4]), 24)
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdBadList(self):
"Test prod function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(BadListError, prod, [[1, "two"], ["e", "pi"]])
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdWrongDim(self):
"Test prod function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(TypeError, prod, [[1, 2], [8, 9]])
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdNonContainer(self):
"Test prod function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(TypeError, prod, None)
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSum(self):
"Test sum function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertEquals(sum([5, 6, 7, 8]), 26)
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumBadList(self):
"Test sum function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(BadListError, sum, [3, 4, 5, "pi"])
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumWrongDim(self):
"Test sum function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(TypeError, sum, [[3, 4], [5, 6]])
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumNonContainer(self):
"Test sum function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(TypeError, sum, True)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverse(self):
"Test reverse function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([1, 2, 4], self.typeCode)
reverse(vector)
self.assertEquals((vector == [4, 2, 1]).all(), True)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongDim(self):
"Test reverse function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([[1, 2], [3, 4]], self.typeCode)
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongSize(self):
"Test reverse function with wrong size"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([9, 8, 7, 6, 5, 4], self.typeCode)
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongType(self):
"Test reverse function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([1, 2, 4], 'c')
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseNonArray(self):
"Test reverse function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
self.assertRaises(TypeError, reverse, [2, 4, 6])
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnes(self):
"Test ones function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros(5, self.typeCode)
ones(vector)
np.testing.assert_array_equal(vector, np.array([1, 1, 1, 1, 1]))
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesWrongDim(self):
"Test ones function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros((5, 5), self.typeCode)
self.assertRaises(TypeError, ones, vector)
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesWrongType(self):
"Test ones function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros((5, 5), 'c')
self.assertRaises(TypeError, ones, vector)
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesNonArray(self):
"Test ones function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
self.assertRaises(TypeError, ones, [2, 4, 6, 8])
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZeros(self):
"Test zeros function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones(5, self.typeCode)
zeros(vector)
np.testing.assert_array_equal(vector, np.array([0, 0, 0, 0, 0]))
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosWrongDim(self):
"Test zeros function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones((5, 5), self.typeCode)
self.assertRaises(TypeError, zeros, vector)
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosWrongType(self):
"Test zeros function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones(6, 'c')
self.assertRaises(TypeError, zeros, vector)
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosNonArray(self):
"Test zeros function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
self.assertRaises(TypeError, zeros, [1, 3, 5, 7, 9])
# Test the (type ARGOUT_ARRAY1[ANY]) typemap
def testEOSplit(self):
"Test eoSplit function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
eoSplit = Vector.__dict__[self.typeStr + "EOSplit"]
even, odd = eoSplit([1, 2, 3])
self.assertEquals((even == [1, 0, 3]).all(), True)
self.assertEquals((odd == [0, 2, 0]).all(), True)
# Test the (type* ARGOUT_ARRAY1, int DIM1) typemap
def testTwos(self):
"Test twos function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
twos = Vector.__dict__[self.typeStr + "Twos"]
vector = twos(5)
self.assertEquals((vector == [2, 2, 2, 2, 2]).all(), True)
# Test the (type* ARGOUT_ARRAY1, int DIM1) typemap
def testTwosNonInt(self):
"Test twos function with non-integer dimension"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
twos = Vector.__dict__[self.typeStr + "Twos"]
self.assertRaises(TypeError, twos, 5.0)
# Test the (int DIM1, type* ARGOUT_ARRAY1) typemap
def testThrees(self):
"Test threes function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
threes = Vector.__dict__[self.typeStr + "Threes"]
vector = threes(6)
self.assertEquals((vector == [3, 3, 3, 3, 3, 3]).all(), True)
# Test the (type* ARGOUT_ARRAY1, int DIM1) typemap
def testThreesNonInt(self):
"Test threes function with non-integer dimension"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
threes = Vector.__dict__[self.typeStr + "Threes"]
self.assertRaises(TypeError, threes, "threes")
######################################################################
class scharTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "schar"
self.typeCode = "b"
######################################################################
class ucharTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "uchar"
self.typeCode = "B"
######################################################################
class shortTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "short"
self.typeCode = "h"
######################################################################
class ushortTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ushort"
self.typeCode = "H"
######################################################################
class intTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "int"
self.typeCode = "i"
######################################################################
class uintTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "uint"
self.typeCode = "I"
######################################################################
class longTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "long"
self.typeCode = "l"
######################################################################
class ulongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ulong"
self.typeCode = "L"
######################################################################
class longLongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "longLong"
self.typeCode = "q"
######################################################################
class ulongLongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ulongLong"
self.typeCode = "Q"
######################################################################
class floatTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "float"
self.typeCode = "f"
######################################################################
class doubleTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "double"
self.typeCode = "d"
######################################################################
if __name__ == "__main__":
# Build the test suite
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite( scharTestCase))
suite.addTest(unittest.makeSuite( ucharTestCase))
suite.addTest(unittest.makeSuite( shortTestCase))
suite.addTest(unittest.makeSuite( ushortTestCase))
suite.addTest(unittest.makeSuite( intTestCase))
suite.addTest(unittest.makeSuite( uintTestCase))
suite.addTest(unittest.makeSuite( longTestCase))
suite.addTest(unittest.makeSuite( ulongTestCase))
suite.addTest(unittest.makeSuite( longLongTestCase))
suite.addTest(unittest.makeSuite(ulongLongTestCase))
suite.addTest(unittest.makeSuite( floatTestCase))
suite.addTest(unittest.makeSuite( doubleTestCase))
# Execute the test suite
print("Testing 1D Functions of Module Vector")
print("NumPy version", np.__version__)
print()
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(bool(result.errors + result.failures))
|
icio/evil
|
refs/heads/master
|
evil/__init__.py
|
1
|
#!/usr/bin/env python
from collections import OrderedDict
import fnmatch
import os
import re
# Operators which act on expressions to their right are OP_RIGHT operators.
# Operators which act on expressions to their left are OP_LEFT operators.
# Operators which act on both are OP_LEFT | OP_RIGHT = OP_BOTH.
OP_LEFT, OP_RIGHT, OP_BOTH = 1, 2, 3
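# For example (illustrative operators only): a postfix '!' would be OP_LEFT,
# a prefix 'not' would be OP_RIGHT, and an infix '|' would be OP_BOTH.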
def evil(expr, lookup, operators, cast, reducer, tokenizer):
"""evil evaluates an expression according to the eval description given.
:param expr: An expression to evaluate.
:param lookup: A callable which takes a single pattern argument and returns
a set of results. The pattern can be anything that is not an
operator token or round brackets.
    :param operators: A precedence-ordered dictionary of (function, side)
                      tuples keyed on the operator token.
    :param cast: A callable which transforms the results of the lookup into
                 the type expected by the operators and the type of the result.
    :param reducer: A callable which takes a sequential list of values (from
                    operations or lookups) and combines them into a result.
                    Typical behaviour is that of the + operator. The return
                    type should be the same as cast.
    :param tokenizer: A callable which will break the query into tokens for
                      evaluation per the lookup and operators (see
                      expr_tokenizer below).
    :raises: SyntaxError if the expression is malformed.
    :returns: The reduced result of evaluating the expression.
    """
operators = OrderedDict((op[0], op[1:]) for op in operators)
if "(" in operators or ")" in operators:
raise ValueError("( and ) are reserved operators")
operator_tokens = ["(", ")"] + operators.keys()
tokens = iter(tokenizer(expr, operator_tokens))
levels = [[]]
while True:
# Token evaluation and pattern lookups
expr = levels.pop() # The currently-constructed expression
new_level = False # We should step into a subexpression
first_token = len(expr) == 0 # The first (sub)exp. token
prev_op_side = None # The side of the last-seen operator
        try:
            # Try to get the side of the last operator from an expression
            # which we are going to continue constructing.
            prev_op_side = operators[expr[-1]][1]
        except (IndexError, KeyError, TypeError):
            pass
for token in tokens:
if token == "(":
new_level = True
break
elif token == ")":
break
elif token in operators:
op_side = operators[token][1]
if first_token and op_side & OP_LEFT:
raise SyntaxError("Operators which act on expressions to "
"their left or both sides cannot be at "
"the beginning of an expression.")
if prev_op_side is not None:
if prev_op_side & OP_RIGHT and op_side & OP_LEFT:
raise SyntaxError("Operators cannot be beside one "
"another if they act on expressions "
"facing one-another.")
expr.append(token)
prev_op_side = op_side
continue
else:
expr.append(cast(lookup(token)))
prev_op_side = None
first_token = False
if new_level:
levels.append(expr)
levels.append([])
continue
elif prev_op_side is not None and prev_op_side & OP_RIGHT:
raise SyntaxError("Operators which act on expressions to their "
"right or both sides cannot be at the end of "
"an expression.")
# Operator evaluation
explen = len(expr)
for op, (op_eval, op_side) in operators.iteritems():
if op_side is OP_RIGHT:
                # Apply right-sided operators. We loop from the end backward
                # so that multiple such operators next to one another are
                # resolved in the correct order.
t = explen - 1
while t >= 0:
if expr[t] == op:
expr[t] = op_eval(expr[t + 1])
del expr[t + 1]
explen -= 1
t -= 1
else:
                # Apply left- and both-sided operators. We loop forward so
                # that multiple such operators next to one another are
                # resolved in the correct order.
t = 0
while t < explen:
if expr[t] == op:
# Apply left- or both-sided operators
if op_side is OP_LEFT:
expr[t] = op_eval(expr[t - 1])
del expr[t - 1]
t -= 1
explen -= 1
elif op_side is OP_BOTH:
expr[t] = op_eval(expr[t - 1], expr[t + 1])
del expr[t + 1], expr[t - 1]
t -= 1
explen -= 2
t += 1
if len(levels) > 0:
levels[-1].append(reducer(expr))
else:
break
return reducer(expr)
def expr_tokenizer(expr, operator_tokens):
"""expr_tokenizer yields the components ("tokens") forming the expression.
    Tokens are split by whitespace, which is never considered a token in its
    own right. operator_tokens should likely include "(" and ")". Operators
    are matched anywhere within the expression, so the word 'test' will be
    split into ['t', 'e', 'st'] if 'e' is an operator.
:param expr: The expression to break into tokens.
:param operator_tokens: A list of operators to extract as tokens.
"""
operator_tokens.sort(key=len, reverse=True)
for m in re.finditer(
r"""(\s+) | # Whitespace
({0}) | # Operators
(.+?)(?={0}|\s|$) # Patterns
""".format("|".join(re.escape(op) for op in operator_tokens)),
expr, re.X
):
token = m.group(2) or m.group(3)
if token:
yield token
def op(token, func, left=False, right=False):
"""op provides a more verbose syntax for declaring operators.
:param token: The string token of the operator. Usually a single character.
:param func: A callable used to evaluate its arguments. Where the operator
is both-sided the callable should accept two arguments. Where
it is one-sided it should accept one argument.
:param left: A boolean indicating whether the operator applies to the
expression to the left of it.
:param right: A boolean indicating whether the operator applies to the
expression to the right of it.
:returns: a tuple (token, func, side) where side is OP_BOTH if left and
right (or neither) and OP_LEFT if left, otherwise OP_RIGHT.
"""
both = (left == right)
return (token, func, OP_BOTH if both else OP_LEFT if left else OP_RIGHT)
def strlookup(pattern, space):
"""strlookup finds items in the given space matching the given pattern.
:param pattern: The pattern we wish to match by, per fnmatch.
    :param space: The iterable of candidate items to match against.
    """
return fnmatch.filter(space, pattern)
def globlookup(pattern, root):
"""globlookup finds filesystem objects whose relative path matches the
given pattern.
    :param pattern: The fnmatch pattern to match relative file paths against.
    :param root: The root directory to search within.
"""
for subdir, dirnames, filenames in os.walk(root):
d = subdir[len(root) + 1:]
files = (os.path.join(d, f) for f in filenames)
for f in fnmatch.filter(files, pattern):
yield f
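# --- Editor's usage sketch (not part of the original module) ---
# A minimal, hedged example wiring evil() together with op(), strlookup()
# and expr_tokenizer() as defined above. The set difference/intersection/
# union operator semantics and the sample data are illustrative
# assumptions, not part of the library itself.
if __name__ == "__main__":
    space = ["apple", "apricot", "banana", "cherry"]
    def lookup(pattern):
        # Resolve a glob-style pattern against the sample space.
        return strlookup(pattern, space)
    def reducer(values):
        # Combine adjacent values with an implicit union.
        out = set()
        for v in values:
            out |= v
        return out
    operators = [
        op("-", lambda a, b: a - b, left=True, right=True),  # difference
        op("&", lambda a, b: a & b, left=True, right=True),  # intersection
        op("|", lambda a, b: a | b, left=True, right=True),  # union
    ]
    # Everything matching 'a*' except 'apricot' -> set(['apple'])
    print(evil("a* - apricot", lookup, operators, set, reducer,
               expr_tokenizer))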
|
jonnatas/codeschool
|
refs/heads/master
|
src/cs_questions/migrations/old/0016_auto_20160531_1924.py
|
4
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-31 22:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cs_questions', '0015_auto_20160531_1923'),
]
operations = [
migrations.AlterField(
model_name='question',
name='long_description',
field=models.TextField(blank=True, help_text='A detailed explanation.', verbose_name='long description'),
),
migrations.AlterField(
model_name='question',
name='name',
field=models.CharField(max_length=100, verbose_name='name'),
),
migrations.AlterField(
model_name='question',
name='short_description',
field=models.CharField(default='no-description', help_text='A very brief one-phrase description used in listings.', max_length=140, verbose_name='short description'),
),
]
|
zzicewind/nova
|
refs/heads/master
|
nova/objects/dns_domain.py
|
29
|
# Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class DNSDomain(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'domain': fields.StringField(),
'scope': fields.StringField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'project_id': fields.StringField(nullable=True),
}
@staticmethod
def _from_db_object(context, vif, db_vif):
for field in vif.fields:
vif[field] = db_vif[field]
vif._context = context
vif.obj_reset_changes()
return vif
@base.remotable_classmethod
def get_by_domain(cls, context, domain):
db_dnsd = db.dnsdomain_get(context, domain)
if db_dnsd:
return cls._from_db_object(context, cls(), db_dnsd)
@base.remotable_classmethod
def register_for_zone(cls, context, domain, zone):
db.dnsdomain_register_for_zone(context, domain, zone)
@base.remotable_classmethod
def register_for_project(cls, context, domain, project):
db.dnsdomain_register_for_project(context, domain, project)
@base.remotable_classmethod
def delete_by_domain(cls, context, domain):
db.dnsdomain_unregister(context, domain)
@base.NovaObjectRegistry.register
class DNSDomainList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('DNSDomain'),
}
obj_relationships = {
'objects': [('1.0', '1.0')],
}
@base.remotable_classmethod
def get_all(cls, context):
db_domains = db.dnsdomain_get_all(context)
return base.obj_make_list(context, cls(context), objects.DNSDomain,
db_domains)
|
asphalt-framework/asphalt-influxdb
|
refs/heads/master
|
tests/test_component.py
|
1
|
import pytest
from asphalt.core import Context
from asphalt.influxdb.client import InfluxDBClient
from asphalt.influxdb.component import InfluxDBComponent
@pytest.mark.asyncio
async def test_default_client(caplog):
"""Test that the default client configuration works as expected."""
async with Context() as context:
await InfluxDBComponent().start(context)
assert isinstance(context.influxdb, InfluxDBClient)
records = [record for record in caplog.records if record.name == 'asphalt.influxdb.component']
records.sort(key=lambda r: r.message)
assert len(records) == 2
assert records[0].message == ("Configured InfluxDB client (default / ctx.influxdb; "
"base_urls=['http://localhost:8086'])")
assert records[1].message == 'InfluxDB client (default) shut down'
@pytest.mark.asyncio
async def test_clustered_client(caplog):
"""Test that a clustered client configuration works as expected."""
async with Context() as context:
base_urls = ['http://influx1.example.org:8086', 'http://influx2.example.org:8087/prefix']
await InfluxDBComponent(base_urls=base_urls).start(context)
assert isinstance(context.influxdb, InfluxDBClient)
records = [record for record in caplog.records if record.name == 'asphalt.influxdb.component']
records.sort(key=lambda r: r.message)
assert len(records) == 2
assert records[0].message == (
"Configured InfluxDB client (default / ctx.influxdb; "
"base_urls=['http://influx1.example.org:8086', 'http://influx2.example.org:8087/prefix'])")
assert records[1].message == 'InfluxDB client (default) shut down'
@pytest.mark.asyncio
async def test_multiple_clients(caplog):
"""Test that a multiple client configuration works as expected."""
async with Context() as context:
await InfluxDBComponent(clients={
'db1': {'base_urls': 'http://localhost:9999'},
'db2': {'base_urls': 'https://remotehost.example.org:443/influx'}
}).start(context)
assert isinstance(context.db1, InfluxDBClient)
assert isinstance(context.db2, InfluxDBClient)
records = [record for record in caplog.records if record.name == 'asphalt.influxdb.component']
records.sort(key=lambda r: r.message)
assert len(records) == 4
assert records[0].message == ("Configured InfluxDB client (db1 / ctx.db1; "
"base_urls=['http://localhost:9999'])")
assert records[1].message == ("Configured InfluxDB client (db2 / ctx.db2; "
"base_urls=['https://remotehost.example.org:443/influx'])")
assert records[2].message == 'InfluxDB client (db1) shut down'
assert records[3].message == 'InfluxDB client (db2) shut down'
|
davidfischer/rpc4django
|
refs/heads/master
|
tests/test_xmlrpcdispatcher.py
|
2
|
'''
XML RPC Dispatcher Tests
-------------------------
'''
import unittest
from django.core.exceptions import ImproperlyConfigured
from decimal import Decimal
try:
from rpc4django.xmlrpcdispatcher import XMLRPCDispatcher
except ImproperlyConfigured:
# Configure Django settings if not already configured
from django.conf import settings
settings.configure(DEBUG=True)
from rpc4django.xmlrpcdispatcher import XMLRPCDispatcher
try:
from xmlrpclib import loads, dumps, Fault
except ImportError:
from xmlrpc.client import loads, dumps, Fault
class TestXMLRPCDispatcher(unittest.TestCase):
def setUp(self):
def echotest(a):
return a
def kwargstest(a, b, **kwargs):
if kwargs.get('c', None) is not None:
return True
return False
def withoutargstest():
return True
        def requestargtest(request, a):
return request
self.dispatcher = XMLRPCDispatcher()
self.dispatcher.register_function(echotest, 'echotest')
self.dispatcher.register_function(kwargstest, 'kwargstest')
self.dispatcher.register_function(requestargtest, 'requestargtest')
self.dispatcher.register_function(withoutargstest, 'withoutargstest')
self.dispatcher.register_multicall_functions()
def test_kwargs(self):
xml = dumps((1, 2), 'kwargstest')
ret = self.dispatcher.dispatch(xml)
out, name = loads(ret)
self.assertFalse(out[0])
ret = self.dispatcher.dispatch(xml, c=1)
out, name = loads(ret)
self.assertTrue(out[0])
        xml = dumps((1,), 'requestargtest')
ret = self.dispatcher.dispatch(xml, request=True)
out, name = loads(ret)
self.assertTrue(out[0])
xml = """<?xml version='1.0'?>
<methodCall>
<methodName>withoutargstest</methodName>
<params>
</params>
</methodCall>
"""
ret = self.dispatcher.dispatch(xml, request='fakerequest')
out, name = loads(ret)
self.assertTrue(out[0])
def test_billion_laughs(self):
payload = """<?xml version="1.0"?>
<!DOCTYPE lolz [
<!ENTITY lol "lol">
<!ELEMENT lolz (#PCDATA)>
<!ENTITY lol1 "&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;">
<!ENTITY lol2 "&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;">
<!ENTITY lol3 "&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;">
<!ENTITY lol4 "&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;">
<!ENTITY lol5 "&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;">
<!ENTITY lol6 "&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;">
<!ENTITY lol7 "&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;">
<!ENTITY lol8 "&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;">
<!ENTITY lol9 "&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;">
]>
<lolz>&lol9;</lolz>"""
ret = self.dispatcher.dispatch(payload)
self.assertRaises(Fault, loads, ret)
def test_decimal(self):
d = Decimal('1.23456')
xml = dumps((d,), 'echotest')
ret = self.dispatcher.dispatch(xml)
out, name = loads(ret)
self.assertEqual(d, out[0])
self.assertTrue(isinstance(out[0], Decimal))
if __name__ == '__main__':
unittest.main()
|
tamimcse/ns-3-dev-git
|
refs/heads/master
|
src/core/examples/sample-rng-plot.py
|
188
|
# -*- Mode:Python; -*-
# /*
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# */
# Demonstrate use of ns-3 as a random number generator integrated with
# plotting tools; adapted from Gustavo Carneiro's ns-3 tutorial
import numpy as np
import matplotlib.pyplot as plt
import ns.core
# mu, var = 100, 225
rng = ns.core.NormalVariable(100.0, 225.0)
x = [rng.GetValue() for t in range(10000)]
# the histogram of the data
n, bins, patches = plt.hist(x, 50, normed=1, facecolor='g', alpha=0.75)
plt.title('ns-3 histogram')
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show()
|
mmazanec22/too-windy
|
refs/heads/master
|
env/lib/python3.5/site-packages/pip/vcs/git.py
|
340
|
from __future__ import absolute_import
import logging
import tempfile
import os.path
from pip.compat import samefile
from pip.exceptions import BadCommand
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.packaging.version import parse as parse_version
from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit
logger = logging.getLogger(__name__)
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = (
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
)
def __init__(self, url=None, *args, **kwargs):
# Works around an apparent Git bug
# (see http://article.gmane.org/gmane.comp.version-control.git/146500)
if url:
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = (
initial_slashes +
urllib_request.url2pathname(path)
.replace('\\', '/').lstrip('/')
)
url = urlunsplit((scheme, netloc, newpath, query, fragment))
after_plus = scheme.find('+') + 1
url = scheme[:after_plus] + urlunsplit(
(scheme[after_plus:], netloc, newpath, query, fragment),
)
super(Git, self).__init__(url, *args, **kwargs)
def get_git_version(self):
VERSION_PFX = 'git version '
version = self.run_command(['version'], show_stdout=False)
if version.startswith(VERSION_PFX):
version = version[len(VERSION_PFX):]
else:
version = ''
        # Get the first 3 positions of the git version because
        # on Windows it is x.y.z.windows.t, and this parses as
        # a LegacyVersion, which always compares smaller than a Version.
version = '.'.join(version.split('.')[:3])
return parse_version(version)
def export(self, location):
"""Export the Git repository at the url to the destination location"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
try:
if not location.endswith('/'):
location = location + '/'
self.run_command(
['checkout-index', '-a', '-f', '--prefix', location],
show_stdout=False, cwd=temp_dir)
finally:
rmtree(temp_dir)
def check_rev_options(self, rev, dest, rev_options):
"""Check the revision options before checkout to compensate that tags
and branches may need origin/ as a prefix.
Returns the SHA1 of the branch or tag if found.
"""
revisions = self.get_short_refs(dest)
origin_rev = 'origin/%s' % rev
if origin_rev in revisions:
# remote branch
return [revisions[origin_rev]]
elif rev in revisions:
# a local tag or branch name
return [revisions[rev]]
else:
logger.warning(
"Could not find a tag or branch '%s', assuming commit.", rev,
)
return rev_options
def check_version(self, dest, rev_options):
"""
Compare the current sha to the ref. ref may be a branch or tag name,
but current rev will always point to a sha. This means that a branch
or tag will never compare as True. So this ultimately only matches
against exact shas.
"""
return self.get_revision(dest).startswith(rev_options[0])
def switch(self, dest, url, rev_options):
self.run_command(['config', 'remote.origin.url', url], cwd=dest)
self.run_command(['checkout', '-q'] + rev_options, cwd=dest)
self.update_submodules(dest)
def update(self, dest, rev_options):
# First fetch changes from the default remote
if self.get_git_version() >= parse_version('1.9.0'):
# fetch tags in addition to everything else
self.run_command(['fetch', '-q', '--tags'], cwd=dest)
else:
self.run_command(['fetch', '-q'], cwd=dest)
# Then reset to wanted revision (maybe even origin/master)
if rev_options:
rev_options = self.check_rev_options(
rev_options[0], dest, rev_options,
)
self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)
#: update submodules
self.update_submodules(dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = [rev]
rev_display = ' (to %s)' % rev
else:
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Cloning %s%s to %s', url, rev_display, display_path(dest),
)
self.run_command(['clone', '-q', url, dest])
if rev:
rev_options = self.check_rev_options(rev, dest, rev_options)
# Only do a checkout if rev_options differs from HEAD
if not self.check_version(dest, rev_options):
self.run_command(
['checkout', '-q'] + rev_options,
cwd=dest,
)
#: repo may contain submodules
self.update_submodules(dest)
def get_url(self, location):
"""Return URL of the first remote encountered."""
remotes = self.run_command(
['config', '--get-regexp', 'remote\..*\.url'],
show_stdout=False, cwd=location)
remotes = remotes.splitlines()
found_remote = remotes[0]
for remote in remotes:
if remote.startswith('remote.origin.url '):
found_remote = remote
break
url = found_remote.split(' ')[1]
return url.strip()
def get_revision(self, location):
current_rev = self.run_command(
['rev-parse', 'HEAD'], show_stdout=False, cwd=location)
return current_rev.strip()
def get_full_refs(self, location):
"""Yields tuples of (commit, ref) for branches and tags"""
output = self.run_command(['show-ref'],
show_stdout=False, cwd=location)
for line in output.strip().splitlines():
commit, ref = line.split(' ', 1)
yield commit.strip(), ref.strip()
def is_ref_remote(self, ref):
return ref.startswith('refs/remotes/')
def is_ref_branch(self, ref):
return ref.startswith('refs/heads/')
def is_ref_tag(self, ref):
return ref.startswith('refs/tags/')
def is_ref_commit(self, ref):
"""A ref is a commit sha if it is not anything else"""
return not any((
self.is_ref_remote(ref),
self.is_ref_branch(ref),
self.is_ref_tag(ref),
))
# Should deprecate `get_refs` since it's ambiguous
def get_refs(self, location):
return self.get_short_refs(location)
def get_short_refs(self, location):
"""Return map of named refs (branches or tags) to commit hashes."""
rv = {}
for commit, ref in self.get_full_refs(location):
ref_name = None
if self.is_ref_remote(ref):
ref_name = ref[len('refs/remotes/'):]
elif self.is_ref_branch(ref):
ref_name = ref[len('refs/heads/'):]
elif self.is_ref_tag(ref):
ref_name = ref[len('refs/tags/'):]
if ref_name is not None:
rv[ref_name] = commit
return rv
def _get_subdirectory(self, location):
"""Return the relative path of setup.py to the git repo root."""
# find the repo root
git_dir = self.run_command(['rev-parse', '--git-dir'],
show_stdout=False, cwd=location).strip()
if not os.path.isabs(git_dir):
git_dir = os.path.join(location, git_dir)
root_dir = os.path.join(git_dir, '..')
# find setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)",
orig_location,
)
return None
# relative path of setup.py to repo root
if samefile(root_dir, location):
return None
return os.path.relpath(location, root_dir)
def get_src_requirement(self, dist, location):
repo = self.get_url(location)
if not repo.lower().startswith('git:'):
repo = 'git+' + repo
egg_project_name = dist.egg_name().split('-', 1)[0]
if not repo:
return None
current_rev = self.get_revision(location)
req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
subdirectory = self._get_subdirectory(location)
if subdirectory:
req += '&subdirectory=' + subdirectory
return req
def get_url_rev(self):
"""
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because, although these URLs use SSH, they sometimes
        don't work with an ssh:// scheme (e.g. GitHub), yet we need a scheme
        for parsing. Hence we remove it again afterwards and return it as a
        stub.
"""
if '://' not in self.url:
assert 'file:' not in self.url
self.url = self.url.replace('git+', 'git+ssh://')
url, rev = super(Git, self).get_url_rev()
url = url.replace('ssh://', '')
else:
url, rev = super(Git, self).get_url_rev()
return url, rev
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
self.run_command(
['submodule', 'update', '--init', '--recursive', '-q'],
cwd=location,
)
@classmethod
def controls_location(cls, location):
if super(Git, cls).controls_location(location):
return True
try:
r = cls().run_command(['rev-parse'],
cwd=location,
show_stdout=False,
on_returncode='ignore')
return not r
except BadCommand:
logger.debug("could not determine if %s is under git control "
"because git is not available", location)
return False
vcs.register(Git)
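# --- Editor's illustration (not part of pip) ---
# A standalone sketch of the ref-name shortening performed by
# Git.get_short_refs() above: 'show-ref' style (commit, ref) pairs are
# mapped to {short_name: commit}. The sample shas are made up.
def _short_refs_sketch(pairs):
    rv = {}
    for commit, ref in pairs:
        for prefix in ('refs/remotes/', 'refs/heads/', 'refs/tags/'):
            if ref.startswith(prefix):
                rv[ref[len(prefix):]] = commit
                break
    return rv
if __name__ == '__main__':
    print(_short_refs_sketch([
        ('1111111', 'refs/heads/master'),
        ('2222222', 'refs/tags/v1.0'),
        ('3333333', 'refs/remotes/origin/master'),
    ]))
    # -> {'master': '1111111', 'v1.0': '2222222', 'origin/master': '3333333'}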
|
bsmr-eve/Pyfa
|
refs/heads/master
|
eos/effects/shipbonuspiratesmallhybriddmg.py
|
1
|
# shipBonusPirateSmallHybridDmg
#
# Used by:
# Ship: Daredevil
# Ship: Hecate
# Ship: Sunesis
type = "passive"
def handler(fit, ship, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Small Hybrid Turret"),
"damageMultiplier", ship.getModifiedItemAttr("shipBonusRole7"))
|
erwilan/ansible
|
refs/heads/devel
|
lib/ansible/executor/playbook_executor.py
|
57
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_native, to_text
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
from ansible.utils.path import makedirs_safe
from ansible.utils.ssh_functions import check_for_controlpersist
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class PlaybookExecutor:
'''
This is the primary class for executing playbooks, and thus the
basis for bin/ansible-playbook operation.
'''
def __init__(self, playbooks, inventory, variable_manager, loader, options, passwords):
self._playbooks = playbooks
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._options = options
self.passwords = passwords
self._unreachable_hosts = dict()
if options.listhosts or options.listtasks or options.listtags or options.syntax:
self._tqm = None
else:
self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=self.passwords)
# Note: We run this here to cache whether the default ansible ssh
# executable supports control persist. Sometime in the future we may
# need to enhance this to check that ansible_ssh_executable specified
# in inventory is also cached. We can't do this caching at the point
# where it is used (in task_executor) because that is post-fork and
# therefore would be discarded after every task.
check_for_controlpersist(C.ANSIBLE_SSH_EXECUTABLE)
def run(self):
'''
Run the given playbook, based on the settings in the play which
may limit the runs to serialized groups, etc.
'''
result = 0
entrylist = []
entry = {}
try:
for playbook_path in self._playbooks:
pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))
if self._tqm is None: # we are doing a listing
entry = {'playbook': playbook_path}
entry['plays'] = []
else:
# make sure the tqm has callbacks loaded
self._tqm.load_callbacks()
self._tqm.send_callback('v2_playbook_on_start', pb)
i = 1
plays = pb.get_plays()
display.vv(u'%d plays in %s' % (len(plays), to_text(playbook_path)))
for play in plays:
if play._included_path is not None:
self._loader.set_basedir(play._included_path)
else:
self._loader.set_basedir(pb._basedir)
# clear any filters which may have been applied to the inventory
self._inventory.remove_restriction()
if play.vars_prompt:
for var in play.vars_prompt:
vname = var['name']
prompt = var.get("prompt", vname)
default = var.get("default", None)
private = var.get("private", True)
confirm = var.get("confirm", False)
encrypt = var.get("encrypt", None)
salt_size = var.get("salt_size", None)
salt = var.get("salt", None)
if vname not in self._variable_manager.extra_vars:
if self._tqm:
self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default)
play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default)
else: # we are either in --list-<option> or syntax check
play.vars[vname] = default
# Create a temporary copy of the play here, so we can run post_validate
# on it without the templating changes affecting the original object.
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
if self._options.syntax:
continue
if self._tqm is None:
# we are just doing a listing
entry['plays'].append(new_play)
else:
self._tqm._unreachable_hosts.update(self._unreachable_hosts)
previously_failed = len(self._tqm._failed_hosts)
previously_unreachable = len(self._tqm._unreachable_hosts)
break_play = False
# we are actually running plays
batches = self._get_serialized_batches(new_play)
if len(batches) == 0:
self._tqm.send_callback('v2_playbook_on_play_start', new_play)
self._tqm.send_callback('v2_playbook_on_no_hosts_matched')
for batch in batches:
# restrict the inventory to the hosts in the serialized batch
self._inventory.restrict_to_hosts(batch)
# and run it...
result = self._tqm.run(play=play)
# break the play if the result equals the special return code
if result & self._tqm.RUN_FAILED_BREAK_PLAY != 0:
result = self._tqm.RUN_FAILED_HOSTS
break_play = True
# check the number of failures here, to see if they're above the maximum
# failure percentage allowed, or if any errors are fatal. If either of those
# conditions are met, we break out, otherwise we only break out if the entire
# batch failed
failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) - \
(previously_failed + previously_unreachable)
if len(batch) == failed_hosts_count:
break_play = True
break
# update the previous counts so they don't accumulate incorrectly
# over multiple serial batches
previously_failed += len(self._tqm._failed_hosts) - previously_failed
previously_unreachable += len(self._tqm._unreachable_hosts) - previously_unreachable
# save the unreachable hosts from this batch
self._unreachable_hosts.update(self._tqm._unreachable_hosts)
if break_play:
break
i = i + 1 # per play
if entry:
entrylist.append(entry) # per playbook
# send the stats callback for this playbook
if self._tqm is not None:
if C.RETRY_FILES_ENABLED:
retries = set(self._tqm._failed_hosts.keys())
retries.update(self._tqm._unreachable_hosts.keys())
retries = sorted(retries)
if len(retries) > 0:
if C.RETRY_FILES_SAVE_PATH:
basedir = C.shell_expand(C.RETRY_FILES_SAVE_PATH)
elif playbook_path:
basedir = os.path.dirname(os.path.abspath(playbook_path))
else:
basedir = '~/'
(retry_name, _) = os.path.splitext(os.path.basename(playbook_path))
filename = os.path.join(basedir, "%s.retry" % retry_name)
if self._generate_retry_inventory(filename, retries):
display.display("\tto retry, use: --limit @%s\n" % filename)
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
# if the last result wasn't zero, break out of the playbook file name loop
if result != 0:
break
if entrylist:
return entrylist
finally:
if self._tqm is not None:
self._tqm.cleanup()
if self._loader:
self._loader.cleanup_all_tmp_files()
if self._options.syntax:
display.display("No issues encountered")
return result
return result
def _get_serialized_batches(self, play):
'''
Returns a list of hosts, subdivided into batches based on
the serial size specified in the play.
'''
# make sure we have a unique list of hosts
all_hosts = self._inventory.get_hosts(play.hosts)
all_hosts_len = len(all_hosts)
# the serial value can be listed as a scalar or a list of
# scalars, so we make sure it's a list here
serial_batch_list = play.serial
if len(serial_batch_list) == 0:
serial_batch_list = [-1]
cur_item = 0
serialized_batches = []
while len(all_hosts) > 0:
# get the serial value from current item in the list
serial = pct_to_int(serial_batch_list[cur_item], all_hosts_len)
# if the serial count was not specified or is invalid, default to
# a list of all hosts, otherwise grab a chunk of the hosts equal
# to the current serial item size
if serial <= 0:
serialized_batches.append(all_hosts)
break
else:
play_hosts = []
for x in range(serial):
if len(all_hosts) > 0:
play_hosts.append(all_hosts.pop(0))
serialized_batches.append(play_hosts)
# increment the current batch list item number, and if we've hit
# the end keep using the last element until we've consumed all of
# the hosts in the inventory
cur_item += 1
if cur_item > len(serial_batch_list) - 1:
cur_item = len(serial_batch_list) - 1
return serialized_batches
def _generate_retry_inventory(self, retry_path, replay_hosts):
'''
Called when a playbook run fails. It generates an inventory which allows
re-running on ONLY the failed hosts. This may duplicate some variable
information in group_vars/host_vars but that is ok, and expected.
'''
try:
makedirs_safe(os.path.dirname(retry_path))
with open(retry_path, 'w') as fd:
for x in replay_hosts:
fd.write("%s\n" % x)
except Exception as e:
display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_native(e)))
return False
return True
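# --- Editor's illustration (not part of Ansible) ---
# A self-contained sketch of the batching behaviour implemented by
# _get_serialized_batches() above, using a simplified stand-in for
# ansible.utils.helpers.pct_to_int. Host names and serial values are
# made up for the example.
def _pct_to_int_sketch(value, num_items):
    # Simplified assumption: '50%'-style values scale against num_items
    # (never below 1), plain values pass through as ints.
    if isinstance(value, str) and value.endswith('%'):
        return int((int(value[:-1]) / 100.0) * num_items) or 1
    return int(value)
def _serialized_batches_sketch(hosts, serial_list):
    batches = []
    hosts = list(hosts)
    total, cur = len(hosts), 0
    while hosts:
        serial = _pct_to_int_sketch(serial_list[cur], total)
        if serial <= 0:
            batches.append(hosts)
            break
        batches.append(hosts[:serial])
        hosts = hosts[serial:]
        # keep using the last serial entry once the list is exhausted
        cur = min(cur + 1, len(serial_list) - 1)
    return batches
if __name__ == '__main__':
    # serial [1, '50%'] over five hosts -> [['h1'], ['h2', 'h3'], ['h4', 'h5']]
    print(_serialized_batches_sketch(['h1', 'h2', 'h3', 'h4', 'h5'], [1, '50%']))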
|
gmalmquist/pants
|
refs/heads/master
|
src/python/pants/java/nailgun_io.py
|
14
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import errno
import io
import os
import select
import socket
import threading
from contextlib import contextmanager
from pants.java.nailgun_protocol import ChunkType, NailgunProtocol
class NailgunStreamReader(threading.Thread):
"""Reads input from stdin and emits Nailgun 'stdin' chunks over a socket."""
SELECT_TIMEOUT = 1
def __init__(self, in_fd, sock, buf_size=io.DEFAULT_BUFFER_SIZE, select_timeout=SELECT_TIMEOUT):
"""
:param file in_fd: the input file descriptor (e.g. sys.stdin) to read from.
:param socket sock: the socket to emit nailgun protocol chunks over.
:param int buf_size: the buffer size for reads from the file descriptor.
:param int select_timeout: the timeout (in seconds) for select.select() calls against the fd.
"""
super(NailgunStreamReader, self).__init__()
self.daemon = True
self._stdin = in_fd
self._socket = sock
self._buf_size = buf_size
self._select_timeout = select_timeout
# N.B. This Event is used as nothing more than a convenient atomic flag - nothing waits on it.
self._stopped = threading.Event()
@property
def is_stopped(self):
"""Indicates whether or not the instance is stopped."""
return self._stopped.is_set()
def stop(self):
"""Stops the instance."""
self._stopped.set()
@contextmanager
def running(self):
self.start()
yield
self.stop()
def run(self):
while not self.is_stopped:
readable, _, errored = select.select([self._stdin], [], [self._stdin], self._select_timeout)
if self._stdin in errored:
self.stop()
return
if not self.is_stopped and self._stdin in readable:
data = os.read(self._stdin.fileno(), self._buf_size)
if not self.is_stopped:
if data:
NailgunProtocol.write_chunk(self._socket, ChunkType.STDIN, data)
else:
NailgunProtocol.write_chunk(self._socket, ChunkType.STDIN_EOF)
try:
self._socket.shutdown(socket.SHUT_WR) # Shutdown socket sends.
except socket.error: # Can happen if response is quick.
pass
finally:
self.stop()
class NailgunStreamWriter(object):
"""A sys.{stdout,stderr} replacement that writes output to a socket using the nailgun protocol."""
def __init__(self, sock, chunk_type, isatty=True, mask_broken_pipe=False):
"""
:param socket sock: A connected socket capable of speaking the nailgun protocol.
:param str chunk_type: A ChunkType constant representing the nailgun protocol chunk type.
:param bool isatty: Whether or not the consumer of this stream has tty capabilities. (Optional)
:param bool mask_broken_pipe: This will toggle the masking of 'broken pipe' errors when writing
to the remote socket. This allows for completion of execution in
the event of a client disconnect (e.g. to support cleanup work).
"""
self._socket = sock
self._chunk_type = chunk_type
self._isatty = isatty
self._mask_broken_pipe = mask_broken_pipe
def write(self, payload):
try:
NailgunProtocol.write_chunk(self._socket, self._chunk_type, payload)
except IOError as e:
# If the remote client disconnects and we try to perform a write (e.g. socket.send/sendall),
# an 'error: [Errno 32] Broken pipe' exception can be thrown. Setting mask_broken_pipe=True
# safeguards against this case (which is unexpected for most writers of sys.stdout etc) so
# that we don't awkwardly interrupt the runtime by throwing this exception on writes to
# stdout/stderr.
if e.errno == errno.EPIPE and not self._mask_broken_pipe:
raise
def flush(self):
return
def isatty(self):
return self._isatty
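# --- Editor's usage sketch (not part of the original module) ---
# A minimal demonstration of NailgunStreamWriter over a local socket pair.
# It assumes NailgunProtocol.read_chunk(sock) returns a (chunk_type, payload)
# tuple mirroring the write_chunk() calls above; if the actual signature
# differs, adjust accordingly.
if __name__ == '__main__':
  import sys
  client_sock, server_sock = socket.socketpair()
  writer = NailgunStreamWriter(client_sock, ChunkType.STDOUT, isatty=False)
  writer.write(b'hello over the nailgun protocol\n')
  chunk_type, payload = NailgunProtocol.read_chunk(server_sock)
  sys.stdout.write('%r: %r\n' % (chunk_type, payload))
  client_sock.close()
  server_sock.close()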
|
Paul-L/android_gio_stock_kernel
|
refs/heads/cm-10.1
|
scripts/build-all.py
|
282
|
#! /usr/bin/env python
# Copyright (c) 2009, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Code Aurora nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
#
# TODO: Accept arguments to indicate what to build.
import errno
import glob
from optparse import OptionParser
import subprocess
import os
import os.path
import shutil
import sys
version = 'build-all.py, version 0.01'
build_dir = '../all-kernels'
make_command = ["vmlinux", "modules"]
make_env = os.environ
make_env.update({
'ARCH': 'arm',
'CROSS_COMPILE': 'arm-none-linux-gnueabi-',
'KCONFIG_NOTIMESTAMP': 'true' })
all_options = {}
def error(msg):
sys.stderr.write("error: %s\n" % msg)
def fail(msg):
"""Fail with a user-printed message"""
error(msg)
sys.exit(1)
def check_kernel():
"""Ensure that PWD is a kernel directory"""
if (not os.path.isfile('MAINTAINERS') or
not os.path.isfile('arch/arm/mach-msm/Kconfig')):
fail("This doesn't seem to be an MSM kernel dir")
def check_build():
"""Ensure that the build directory is present."""
if not os.path.isdir(build_dir):
try:
os.makedirs(build_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def update_config(fname, line):
    print 'Updating %s with \'%s\'\n' % (fname, line)
    defconfig = open(fname, 'a')
    defconfig.write(line + '\n')
    defconfig.close()
def scan_configs():
"""Get the full list of defconfigs appropriate for this tree."""
names = {}
for n in glob.glob('arch/arm/configs/msm[0-9]*_defconfig'):
names[os.path.basename(n)[:-10]] = n
for n in glob.glob('arch/arm/configs/qsd*_defconfig'):
names[os.path.basename(n)[:-10]] = n
return names
class Builder:
def __init__(self, logname):
self.logname = logname
self.fd = open(logname, 'w')
def run(self, args):
devnull = open('/dev/null', 'r')
proc = subprocess.Popen(args, stdin=devnull,
env=make_env,
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
count = 0
# for line in proc.stdout:
rawfd = proc.stdout.fileno()
while True:
line = os.read(rawfd, 1024)
if not line:
break
self.fd.write(line)
self.fd.flush()
if all_options.verbose:
sys.stdout.write(line)
sys.stdout.flush()
else:
for i in range(line.count('\n')):
count += 1
if count == 64:
count = 0
print
sys.stdout.write('.')
sys.stdout.flush()
print
result = proc.wait()
self.fd.close()
return result
failed_targets = []
def build(target):
dest_dir = os.path.join(build_dir, target)
log_name = '%s/log-%s.log' % (build_dir, target)
print 'Building %s in %s log %s' % (target, dest_dir, log_name)
if not os.path.isdir(dest_dir):
os.mkdir(dest_dir)
defconfig = 'arch/arm/configs/%s_defconfig' % target
dotconfig = '%s/.config' % dest_dir
shutil.copyfile(defconfig, dotconfig)
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'%s_defconfig' % target], env=make_env, stdin=devnull)
devnull.close()
if not all_options.updateconfigs:
build = Builder(log_name)
result = build.run(['make', 'O=%s' % dest_dir] + make_command)
if result != 0:
if all_options.keep_going:
failed_targets.append(target)
fail_or_error = error
else:
fail_or_error = fail
fail_or_error("Failed to build %s, see %s" % (target, build.logname))
# Copy the defconfig back.
if all_options.configs or all_options.updateconfigs:
shutil.copyfile(dotconfig, defconfig)
def build_many(allconf, targets):
print "Building %d target(s)" % len(targets)
for target in targets:
if all_options.updateconfigs:
update_config(allconf[target], all_options.updateconfigs)
build(target)
if failed_targets:
fail('\n '.join(["Failed targets:"] +
[target for target in failed_targets]))
def main():
check_kernel()
check_build()
configs = scan_configs()
usage = ("""
%prog [options] all -- Build all targets
%prog [options] target target ... -- List specific targets
%prog [options] perf -- Build all perf targets
%prog [options] noperf -- Build all non-perf targets""")
parser = OptionParser(usage=usage, version=version)
parser.add_option('--configs', action='store_true',
dest='configs',
help="Copy configs back into tree")
parser.add_option('--list', action='store_true',
dest='list',
help='List available targets')
parser.add_option('-v', '--verbose', action='store_true',
dest='verbose',
help='Output to stdout in addition to log file')
parser.add_option('--oldconfig', action='store_true',
dest='oldconfig',
help='Only process "make oldconfig"')
parser.add_option('--updateconfigs',
dest='updateconfigs',
help="Update defconfigs with provided option setting, "
"e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
parser.add_option('-j', '--jobs', type='int', dest="jobs",
help="Number of simultaneous jobs")
parser.add_option('-l', '--load-average', type='int',
dest='load_average',
help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
parser.add_option('-k', '--keep-going', action='store_true',
dest='keep_going', default=False,
help="Keep building other targets if a target fails")
(options, args) = parser.parse_args()
global all_options
all_options = options
if options.list:
print "Available targets:"
for target in configs.keys():
print " %s" % target
sys.exit(0)
if options.oldconfig:
global make_command
make_command = ["oldconfig"]
if options.jobs:
make_command.append("-j%d" % options.jobs)
if options.load_average:
make_command.append("-l%d" % options.load_average)
if args == ['all']:
build_many(configs, configs.keys())
elif args == ['perf']:
targets = []
for t in configs.keys():
if "perf" in t:
targets.append(t)
build_many(configs, targets)
elif args == ['noperf']:
targets = []
for t in configs.keys():
if "perf" not in t:
targets.append(t)
build_many(configs, targets)
elif len(args) > 0:
targets = []
for t in args:
if t not in configs.keys():
parser.error("Target '%s' not one of %s" % (t, configs.keys()))
targets.append(t)
build_many(configs, targets)
else:
parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
main()
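# --- Editor's usage note (not part of the original script) ---
# Example invocations (the target name below is illustrative; run with
# --list to see the real targets scanned from arch/arm/configs/):
#   ./build-all.py --list
#   ./build-all.py -j 8 all
#   ./build-all.py --updateconfigs='CONFIG_USE_THING=y' msm7630-perf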
|
windyuuy/opera
|
refs/heads/master
|
chromium/src/third_party/python_26/Lib/test/test_winreg.py
|
56
|
# Test the windows specific win32reg module.
# Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey
from _winreg import *
import os, sys
import unittest
from test import test_support
test_key_name = "SOFTWARE\\Python Registry Test Key - Delete Me"
test_data = [
("Int Value", 45, REG_DWORD),
("String Val", "A string value", REG_SZ),
("StringExpand", "The path is %path%", REG_EXPAND_SZ),
("Multi-string", ["Lots", "of", "string", "values"], REG_MULTI_SZ),
("Raw Data", ("binary"+chr(0)+"data"), REG_BINARY),
("Big String", "x"*(2**14-1), REG_SZ),
("Big Binary", "x"*(2**14), REG_BINARY),
]
if test_support.have_unicode:
test_data += [
(unicode("Unicode Val"), unicode("A Unicode value"), REG_SZ,),
("UnicodeExpand", unicode("The path is %path%"), REG_EXPAND_SZ),
("Multi-unicode", [unicode("Lots"), unicode("of"), unicode("unicode"),
unicode("values")], REG_MULTI_SZ),
("Multi-mixed", [unicode("Unicode"), unicode("and"), "string",
"values"], REG_MULTI_SZ),
]
class WinregTests(unittest.TestCase):
remote_name = None
def WriteTestData(self, root_key):
# Set the default value for this key.
SetValue(root_key, test_key_name, REG_SZ, "Default value")
key = CreateKey(root_key, test_key_name)
# Create a sub-key
sub_key = CreateKey(key, "sub_key")
# Give the sub-key some named values
for value_name, value_data, value_type in test_data:
SetValueEx(sub_key, value_name, 0, value_type, value_data)
# Check we wrote as many items as we thought.
nkeys, nvalues, since_mod = QueryInfoKey(key)
self.assertEquals(nkeys, 1, "Not the correct number of sub keys")
self.assertEquals(nvalues, 1, "Not the correct number of values")
nkeys, nvalues, since_mod = QueryInfoKey(sub_key)
self.assertEquals(nkeys, 0, "Not the correct number of sub keys")
self.assertEquals(nvalues, len(test_data),
"Not the correct number of values")
# Close this key this way...
# (but before we do, copy the key as an integer - this allows
# us to test that the key really gets closed).
int_sub_key = int(sub_key)
CloseKey(sub_key)
try:
QueryInfoKey(int_sub_key)
self.fail("It appears the CloseKey() function does "
"not close the actual key!")
except EnvironmentError:
pass
# ... and close that key that way :-)
int_key = int(key)
key.Close()
try:
QueryInfoKey(int_key)
self.fail("It appears the key.Close() function "
"does not close the actual key!")
except EnvironmentError:
pass
def ReadTestData(self, root_key):
# Check we can get default value for this key.
val = QueryValue(root_key, test_key_name)
self.assertEquals(val, "Default value",
"Registry didn't give back the correct value")
key = OpenKey(root_key, test_key_name)
# Read the sub-keys
with OpenKey(key, "sub_key") as sub_key:
# Check I can enumerate over the values.
index = 0
while 1:
try:
data = EnumValue(sub_key, index)
except EnvironmentError:
break
self.assertEquals(data in test_data, True,
"Didn't read back the correct test data")
index = index + 1
self.assertEquals(index, len(test_data),
"Didn't read the correct number of items")
# Check I can directly access each item
for value_name, value_data, value_type in test_data:
read_val, read_typ = QueryValueEx(sub_key, value_name)
self.assertEquals(read_val, value_data,
"Could not directly read the value")
self.assertEquals(read_typ, value_type,
"Could not directly read the value")
sub_key.Close()
# Enumerate our main key.
read_val = EnumKey(key, 0)
self.assertEquals(read_val, "sub_key", "Read subkey value wrong")
try:
EnumKey(key, 1)
self.fail("Was able to get a second key when I only have one!")
except EnvironmentError:
pass
key.Close()
def DeleteTestData(self, root_key):
key = OpenKey(root_key, test_key_name, 0, KEY_ALL_ACCESS)
sub_key = OpenKey(key, "sub_key", 0, KEY_ALL_ACCESS)
# It is not necessary to delete the values before deleting
# the key (although subkeys must not exist). We delete them
# manually just to prove we can :-)
for value_name, value_data, value_type in test_data:
DeleteValue(sub_key, value_name)
nkeys, nvalues, since_mod = QueryInfoKey(sub_key)
self.assertEquals(nkeys, 0, "subkey not empty before delete")
self.assertEquals(nvalues, 0, "subkey not empty before delete")
sub_key.Close()
DeleteKey(key, "sub_key")
try:
            # Shouldn't be able to delete it twice!
DeleteKey(key, "sub_key")
self.fail("Deleting the key twice succeeded")
except EnvironmentError:
pass
key.Close()
DeleteKey(root_key, test_key_name)
# Opening should now fail!
try:
key = OpenKey(root_key, test_key_name)
self.fail("Could open the non-existent key")
except WindowsError: # Use this error name this time
pass
def TestAll(self, root_key):
self.WriteTestData(root_key)
self.ReadTestData(root_key)
self.DeleteTestData(root_key)
def testLocalMachineRegistryWorks(self):
self.TestAll(HKEY_CURRENT_USER)
def testConnectRegistryToLocalMachineWorks(self):
# perform minimal ConnectRegistry test which just invokes it
h = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
h.Close()
def testRemoteMachineRegistryWorks(self):
if not self.remote_name:
return # remote machine name not specified
remote_key = ConnectRegistry(self.remote_name, HKEY_CURRENT_USER)
self.TestAll(remote_key)
def testExpandEnvironmentStrings(self):
r = ExpandEnvironmentStrings(u"%windir%\\test")
self.assertEqual(type(r), unicode)
self.assertEqual(r, os.environ["windir"] + "\\test")
def test_main():
test_support.run_unittest(WinregTests)
if __name__ == "__main__":
try:
WinregTests.remote_name = sys.argv[sys.argv.index("--remote")+1]
except (IndexError, ValueError):
print "Remote registry calls can be tested using",
print "'test_winreg.py --remote \\\\machine_name'"
WinregTests.remote_name = None
test_main()
|
numerigraphe/odoo
|
refs/heads/8.0
|
openerp/addons/test_access_rights/models.py
|
299
|
from openerp import fields, models
class SomeObj(models.Model):
_name = 'test_access_right.some_obj'
val = fields.Integer()
|
hack4impact/vision-zero-philly
|
refs/heads/master
|
tests/test_incident.py
|
1
|
import unittest
import datetime
from app import create_app, db
from app.models import Incident, IncidentLocation, Agency, User
class IncidentTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_location_no_incident(self):
loc = IncidentLocation(
latitude='39.951039',
longitude='-75.197428',
original_user_text='3700 Spruce St.'
)
self.assertTrue(loc.latitude == '39.951039')
self.assertTrue(loc.longitude == '-75.197428')
self.assertTrue(loc.original_user_text == '3700 Spruce St.')
self.assertTrue(loc.incident_report is None)
def test_location_has_incident(self):
incident_report_1 = Incident(send_email_upon_creation=False)
incident_report_2 = Incident(send_email_upon_creation=False)
loc = IncidentLocation(
latitude='39.951039',
longitude='-75.197428',
original_user_text='3700 Spruce St.',
incident_report=incident_report_1
)
self.assertEqual(loc.incident_report, incident_report_1)
loc.incident_report = incident_report_2
self.assertEqual(loc.incident_report, incident_report_2)
def test_incident_no_location(self):
now = datetime.datetime.now()
incident = Incident(
pedestrian_num=0,
bicycle_num=1,
automobile_num=1,
date=now,
picture_url='http://google.com',
description='Truck idling on the road!',
send_email_upon_creation=False
)
self.assertEqual(incident.pedestrian_num, 0)
self.assertEqual(incident.bicycle_num, 1)
self.assertEqual(incident.automobile_num, 1)
self.assertEqual(incident.picture_url, 'http://google.com')
self.assertEqual(incident.description, 'Truck idling on the road!')
    def test_incident_report_with_location_no_agency(self):
        now = datetime.datetime.now()
        loc1 = IncidentLocation(
            latitude='39.951021',
            longitude='-75.197243',
            original_user_text='3700 Spruce St.'
        )
        loc2 = IncidentLocation(
            latitude='',
            longitude='-75.197428',
            original_user_text='3800 Spruce St.'
        )
        incident = Incident(
            pedestrian_num=0,
            bicycle_num=1,
            automobile_num=1,
            date=now,
            location=loc1,
            picture_url='http://google.com',
            description='Truck idling on the road!',
            send_email_upon_creation=False
        )
self.assertEqual(incident.location, loc1)
incident.location = loc2
self.assertEqual(incident.location, loc2)
    def test_incident_report_with_contact(self):
        now = datetime.datetime.now()
        incident = Incident(
            pedestrian_num=0,
            bicycle_num=1,
            automobile_num=1,
            date=now,
            picture_url='http://google.com',
            description='Truck idling on the road!',
            contact_name="Bob",
            contact_phone=1234567890,
            contact_email="[email protected]",
            send_email_upon_creation=False
        )
        self.assertEqual(incident.contact_name, "Bob")
self.assertEqual(incident.contact_phone, 1234567890)
self.assertEqual(incident.contact_email, "[email protected]")
|
spookypeanut/trackinggeek
|
refs/heads/master
|
trackinggeek/point.py
|
1
|
# Tracking Geek: A tool for visualizing swathes of gpx files at once
# Copyright (C) 2012, Henry Bush
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Point(object):
def __init__(self, lat, long):
""" A point on the Earth's surface
"""
self.lat = float(lat)
self.long = float(long)
def __repr__(self):
return "trackinggeek.Point(latitude=%s, longitude=%s)" % (self.lat,
self.long)
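# --- Editor's usage sketch (not part of the original module) ---
# Minimal example of the class above (the coordinates are illustrative):
if __name__ == "__main__":
    p = Point("51.5074", "-0.1278")
    print(p)  # -> trackinggeek.Point(latitude=51.5074, longitude=-0.1278)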
|
daxm/fmcapi
|
refs/heads/master
|
fmcapi/api_objects/policy_services/snmpalerts.py
|
19
|
"""Not yet implemented."""
|
SaM-Solutions/samba
|
refs/heads/master
|
source4/scripting/python/samba/netcmd/join.py
|
19
|
#!/usr/bin/env python
#
# joins
#
# Copyright Jelmer Vernooij 2010
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import samba.getopt as options
from samba.net import Net, LIBNET_JOIN_AUTOMATIC
from samba.netcmd import Command, CommandError, Option
from samba.dcerpc.misc import SEC_CHAN_WKSTA, SEC_CHAN_BDC
from samba.join import join_RODC, join_DC
class cmd_join(Command):
"""Joins domain as either member or backup domain controller [server connection needed]"""
synopsis = "%prog join <dnsdomain> [DC | RODC | MEMBER] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_options = [
Option("--server", help="DC to join", type=str),
Option("--site", help="site to join", type=str),
]
takes_args = ["domain", "role?"]
def run(self, domain, role=None, sambaopts=None, credopts=None,
versionopts=None, server=None, site=None):
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
net = Net(creds, lp, server=credopts.ipaddress)
if site is None:
site = "Default-First-Site-Name"
netbios_name = lp.get("netbios name")
        if role is not None:
role = role.upper()
if role is None or role == "MEMBER":
secure_channel_type = SEC_CHAN_WKSTA
elif role == "DC":
join_DC(server=server, creds=creds, lp=lp, domain=domain,
site=site, netbios_name=netbios_name)
return
elif role == "RODC":
join_RODC(server=server, creds=creds, lp=lp, domain=domain,
site=site, netbios_name=netbios_name)
return
else:
            raise CommandError("Invalid role %s (possible values: MEMBER, DC, RODC)" % role)
(join_password, sid, domain_name) = net.join(domain,
netbios_name,
secure_channel_type,
LIBNET_JOIN_AUTOMATIC)
self.outf.write("Joined domain %s (%s)\n" % (domain_name, sid))
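# Editor's note: illustrative invocations matching the synopsis above (the
# front-end program name, domain and server values are placeholders):
#
#   <prog> join samba.example.com MEMBER
#   <prog> join samba.example.com DC --server=dc1.samba.example.com --site=HQ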
|
eromoe/pyspider
|
refs/heads/master
|
pyspider/message_queue/beanstalk.py
|
12
|
#!/usr/bin/env python
# coding:utf-8
"""beanstalk queue - queue based on beanstalk
Setting: you need to set max-job-size bigger(default 65535)
DAEMON_OPTS="-l $BEANSTALKD_LISTEN_ADDR -p $BEANSTALKD_LISTEN_PORT -z 524288"
"""
import time
import umsgpack
import beanstalkc
import threading
import logging
from six.moves import queue as BaseQueue
class BeanstalkQueue(object):
max_timeout = 0.3
Empty = BaseQueue.Empty
Full = BaseQueue.Full
def __init__(self, name, host='localhost:11300', maxsize=0):
"""
Constructor for a BeanstalkdQueue.
"""
self.name = name
config = host.split(':')
self.host = config[0] if len(config) else 'localhost'
self.port = int(config[1]) if len(config) > 1 else 11300
self.lock = threading.RLock()
self.maxsize = maxsize
self.reconnect()
def stats(self):
try:
with self.lock:
stats = self.connection.stats_tube(self.name)
        except beanstalkc.CommandFailed as err:
            # an unused tube reports NOT_FOUND; re-raise anything else
            if err[1] == 'NOT_FOUND':
                return {}
            raise
        stats = [item.split(': ') for item in stats.split('\n') if ':' in item]
stats = [(item[0], item[1]) for item in stats if len(item) == 2]
return dict(stats)
def reconnect(self):
self.connection = beanstalkc.Connection(host=self.host, port=self.port, parse_yaml=False)
self.connection.use(self.name)
self.connection.watch(self.name)
def qsize(self):
stats = self.stats()
return int(stats.get('current-jobs-ready', 0))
def empty(self):
if self.qsize() == 0:
return True
else:
return False
def full(self):
if self.maxsize and self.qsize() >= self.maxsize:
return True
else:
return False
def put(self, obj, block=True, timeout=None):
if not block:
return self.put_nowait(obj)
start_time = time.time()
while True:
try:
return self.put_nowait(obj)
except BaseQueue.Full:
if timeout:
lasted = time.time() - start_time
if timeout > lasted:
time.sleep(min(self.max_timeout, timeout - lasted))
else:
raise
else:
time.sleep(self.max_timeout)
def put_nowait(self, obj):
if self.full():
raise BaseQueue.Full
with self.lock:
return self.connection.put(umsgpack.packb(obj))
def get(self, block=True, timeout=None):
if not block:
return self.get_nowait()
start_time = time.time()
while True:
try:
return self.get_nowait()
except BaseQueue.Empty:
if timeout:
lasted = time.time() - start_time
if timeout > lasted:
time.sleep(min(self.max_timeout, timeout - lasted))
else:
raise
else:
time.sleep(self.max_timeout)
def get_nowait(self):
try:
with self.lock:
job = self.connection.reserve(0)
if not job:
raise BaseQueue.Empty
else:
body = umsgpack.unpackb(job.body)
job.delete()
return body
except beanstalkc.DeadlineSoon:
raise BaseQueue.Empty
Queue = BeanstalkQueue
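# Editor's note: a minimal usage sketch (not part of the original module).
# It assumes a beanstalkd daemon is reachable on localhost:11300; the tube
# name 'demo' is illustrative.
#
#   q = BeanstalkQueue('demo')
#   q.put({'url': 'http://example.com'})   # serialized with umsgpack
#   task = q.get(timeout=1.0)              # reserved, unpacked, then deleted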
|
ogrisel/sklearn_pycon2014
|
refs/heads/master
|
notebooks/fig_code/linear_regression.py
|
63
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
def plot_linear_regression():
a = 0.5
b = 1.0
# x from 0 to 10
x = 30 * np.random.random(20)
# y = a*x + b with noise
y = a * x + b + np.random.normal(size=x.shape)
# create a linear regression classifier
clf = LinearRegression()
clf.fit(x[:, None], y)
# predict y from the data
x_new = np.linspace(0, 30, 100)
y_new = clf.predict(x_new[:, None])
# plot the results
ax = plt.axes()
ax.scatter(x, y)
ax.plot(x_new, y_new)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.axis('tight')
if __name__ == '__main__':
plot_linear_regression()
plt.show()
|
SteerSuite/steersuite-rutgers
|
refs/heads/master
|
steerstats/tools/gameobjects/gametime.py
|
8
|
class GameClock(object):
"""Manages time in a game."""
def __init__(self, game_ticks_per_second=20):
"""Create a Game Clock object.
game_ticks_per_second -- The number of logic frames a second.
"""
self.game_ticks_per_second = float(game_ticks_per_second)
self.game_tick = 1. / self.game_ticks_per_second
self.speed = 1.
self.clock_time = 0.
self.virtual_time = 0.
self.game_time = 0.
self.game_frame_count = 0
self.real_time_passed = 0.
self.real_time = self.get_real_time()
self.started = False
self.paused = False
self.between_frame = 0.0
self.fps_sample_start_time = 0.0
self.fps_sample_count = 0
self.average_fps = 0
def start(self):
"""Starts the Game Clock. Must be called once."""
if self.started:
return
self.clock_time = 0.
self.virtual_time = 0.
self.game_time = 0.
self.game_frame_count = 0
self.real_time_passed = 0.
self.real_time = self.get_real_time()
self.started = True
self.fps = 0.0
self.fps_sample_start_time = self.real_time
self.fps_sample_count = 0
def set_speed(self, speed):
"""Sets the speed of the clock.
speed -- A time factor (1 is normal speed, 2 is twice normal)
"""
assert isinstance(speed, float), "Must be a float"
if speed < 0.0:
raise ValueError("Negative speeds not supported")
self.speed = speed
    def pause(self):
        """Pauses the Game Clock."""
        self.paused = True
    def unpause(self):
        """Un-pauses the Game Clock."""
        self.paused = False
def get_real_time(self):
"""Returns the real time, as reported by the system clock.
        This method may be overridden."""
import time
return time.clock()
def get_fps(self):
"""Retrieves the current frames per second as a tuple containing
the fps and average fps over a second."""
return self.fps, self.average_fps
def get_between_frame(self):
"""Returns the interpolant between the previous game tick and the
next game tick."""
return self.between_frame
def update(self, max_updates = 0):
"""Advances time, must be called once per frame. Yields tuples of
game frame count and game time.
max_updates -- Maximum number of game time updates to issue.
"""
assert self.started, "You must call 'start' before using a GameClock."
real_time_now = self.get_real_time()
self.real_time_passed = real_time_now - self.real_time
self.real_time = real_time_now
self.clock_time += self.real_time_passed
if not self.paused:
self.virtual_time += self.real_time_passed * self.speed
update_count = 0
while self.game_time + self.game_tick < self.virtual_time:
self.game_frame_count += 1
self.game_time = self.game_frame_count * self.game_tick
                yield (self.game_frame_count, self.game_time)
                update_count += 1
                if max_updates and update_count == max_updates:
                    break
self.between_frame = ( self.virtual_time - self.game_time ) / self.game_tick
if self.real_time_passed != 0:
self.fps = 1.0 / self.real_time_passed
else:
self.fps = 0.0
self.fps_sample_count += 1
if self.real_time - self.fps_sample_start_time > 1.0:
self.average_fps = self.fps_sample_count / (self.real_time - self.fps_sample_start_time)
self.fps_sample_start_time = self.real_time
self.fps_sample_count = 0
if __name__ == "__main__":
import time
t = GameClock(20) # AI is 20 frames per second
t.start()
while t.virtual_time < 2.0:
for (frame_count, game_time) in t.update():
print "Game frame #%i, %2.4f" % (frame_count, game_time)
virtual_time = t.virtual_time
print "\t%2.2f%% between game frame, time is %2.4f"%(t.between_frame*100., virtual_time)
time.sleep(0.2) # Simulate time to render frame
|
heynemann/pyvows
|
refs/heads/master
|
pyvows/cli.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''PyVows' main entry point. Contains code for command-line I/O,
running tests, and the almighty `if __name__ == '__main__': main()`.
'''
# pyVows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann [email protected]
from __future__ import division, print_function
import argparse
import inspect
import os
from os.path import isfile, split
import sys
import tempfile
try:
from coverage import coverage
COVERAGE_AVAILABLE = True
except ImportError:
COVERAGE_AVAILABLE = False
from pyvows.color import yellow, Style, Fore
from pyvows.reporting import VowsDefaultReporter
from pyvows.reporting.xunit import XUnitReporter
from pyvows import version
#-------------------------------------------------------------------------------------------------
class Messages(object): # pragma: no cover
'''A simple container for command-line interface strings.'''
summary = 'Run PyVows tests.'
    path = 'Directory to look for vows recursively. If a file is passed,' + \
        ' the file will be the target for vows. (default: %(default)r).'
pattern = 'Pattern of vows files. (default: %(default)r)'
verbosity = 'Verbosity. May be specified many times to increase verbosity (default: -vv)'
cover = 'Show the code coverage of tests. (default: %(default)s)'
cover_package = 'Verify coverage of %(metavar)s. May be specified many times. (default: all packages)'
cover_omit = 'Exclude %(metavar)s from coverage. May be specified many times. (default: no files)'
cover_threshold = 'Coverage below %(metavar)s is considered a failure. (default: %(default)s)'
cover_report = 'Store coverage report as %(metavar)s. (default: %(default)r)'
xunit_output = 'Enable XUnit output. (default: %(default)s)'
xunit_file = 'Store XUnit output as %(metavar)s. (default: %(default)r)'
exclude = 'Exclude tests and contexts that match regex-pattern %(metavar)s [Mutually exclusive with --include]'
include = 'Include only tests and contexts that match regex-pattern %(metavar)s [Mutually exclusive with --exclude]'
profile = 'Prints the 10 slowest topics. (default: %(default)s)'
profile_threshold = 'Tests taking longer than %(metavar)s seconds are considered slow. (default: %(default)s)'
no_color = 'Turn off colorized output. (default: %(default)s)'
progress = 'Show progress ticks during testing. (default: %(default)s)'
template = 'Print a PyVows test file template. (Disables testing)'
capture_output = 'Capture stdout and stderr during test execution (default: %(default)s)'
class Parser(argparse.ArgumentParser):
def __init__(self, description=Messages.summary, **kwargs):
super(Parser, self).__init__(
description=description,
**kwargs)
#Easy underlining, if we ever need it in the future
#uline = lambda text: '\033[4m{0}\033[24m'.format(text)
metavar = lambda metavar: '{0}{metavar}{0}'.format(Style.RESET_ALL, metavar=metavar.upper())
self.add_argument('-p', '--pattern', default='*_vows.py', help=Messages.pattern, metavar=metavar('pattern'))
### Filtering
self.add_argument('-e', '--exclude', action='append', default=[], help=Messages.exclude, metavar=metavar('exclude'))
self.add_argument('-i', '--include', action='append', default=[], help=Messages.include, metavar=metavar('include'))
### Coverage
cover_group = self.add_argument_group('Test Coverage')
cover_group.add_argument('-c', '--cover', action='store_true', default=False, help=Messages.cover)
cover_group.add_argument(
'-l', '--cover-package', action='append', default=[],
help=Messages.cover_package, metavar=metavar('package')
)
cover_group.add_argument(
'-o', '--cover-omit', action='append', default=[],
help=Messages.cover_omit, metavar=metavar('file')
)
cover_group.add_argument(
'-t', '--cover-threshold', type=float, default=80.0,
help=Messages.cover_threshold, metavar=metavar('number')
)
cover_group.add_argument(
'-r', '--cover-report', action='store', default=None,
help=Messages.cover_report, metavar=metavar('file')
)
### XUnit
xunit_group = self.add_argument_group('XUnit')
xunit_group.add_argument('-x', '--xunit-output', action='store_true', default=False, help=Messages.xunit_output)
xunit_group.add_argument(
'-f', '--xunit-file', action='store', default='pyvows.xml',
help=Messages.xunit_file, metavar=metavar('file')
)
### Profiling
profile_group = self.add_argument_group('Profiling')
profile_group.add_argument('--profile', action='store_true', dest='profile', default=False, help=Messages.profile)
profile_group.add_argument(
'--profile-threshold', type=float, default=0.1,
help=Messages.profile_threshold, metavar=metavar('num')
)
### Aux/Unconventional
aux_group = self.add_argument_group('Utility')
aux_group.add_argument('--template', action='store_true', dest='template', default=False, help=Messages.template)
### Misc
self.add_argument('--no-color', action='store_true', default=False, help=Messages.no_color)
self.add_argument('--progress', action='store_true', dest='progress', default=False, help=Messages.progress)
self.add_argument('--version', action='version', version='%(prog)s {0}'.format(version.to_str()))
self.add_argument('--capture-output', action='store_true', default=False, help=Messages.capture_output)
self.add_argument('-v', action='append_const', dest='verbosity', const=1, help=Messages.verbosity)
self.add_argument('path', nargs='?', default=os.curdir, help=Messages.path)
def run(path, pattern, verbosity, show_progress, exclusion_patterns=None, inclusion_patterns=None, capture_output=False):
    """Collect vows found under `path` matching `pattern` and run them.
    This calls Vows.run(), which then calls VowsRunner.run().
    """
# needs to be imported here, else the no-color option won't work
from pyvows.core import Vows
if exclusion_patterns:
Vows.exclude(exclusion_patterns)
if inclusion_patterns:
Vows.include(inclusion_patterns)
Vows.collect(path, pattern)
on_success = show_progress and VowsDefaultReporter.on_vow_success or None
on_error = show_progress and VowsDefaultReporter.on_vow_error or None
result = Vows.run(on_success, on_error, capture_output)
return result
def main():
'''PyVows' runtime implementation.
'''
# needs to be imported here, else the no-color option won't work
from pyvows.reporting import VowsDefaultReporter
arguments = Parser().parse_args()
if arguments.template:
from pyvows.utils import template
template()
sys.exit() # Exit after printing template, since it's
# supposed to be redirected from STDOUT by the user
path, pattern = arguments.path, arguments.pattern
if path and isfile(path):
path, pattern = split(path)
if not path:
path = os.curdir
if arguments.no_color:
for color_name, value in inspect.getmembers(Fore):
if not color_name.startswith('_'):
setattr(Fore, color_name, '')
if arguments.cover and COVERAGE_AVAILABLE:
cov = coverage(source=arguments.cover_package,
omit=arguments.cover_omit)
cov.erase()
cov.start()
verbosity = len(arguments.verbosity) if arguments.verbosity else 2
result = run(
path,
pattern,
verbosity,
arguments.progress,
exclusion_patterns=arguments.exclude,
inclusion_patterns=arguments.include,
capture_output=arguments.capture_output
)
reporter = VowsDefaultReporter(result, verbosity)
# Print test results first
reporter.pretty_print()
# Print profile if necessary
if arguments.profile:
reporter.print_profile(arguments.profile_threshold)
# Print coverage if necessary
if result.successful and arguments.cover:
# if coverage was requested, but unavailable, warn the user
if not COVERAGE_AVAILABLE:
print()
print(yellow('WARNING: Cover disabled because coverage could not be found.'))
print(yellow('Make sure it is installed and accessible.'))
print()
# otherwise, we're good
else:
cov.stop()
xml = ''
try:
with tempfile.NamedTemporaryFile() as tmp:
cov.xml_report(outfile=tmp.name)
tmp.seek(0)
xml = tmp.read()
except Exception:
err = sys.exc_info()[1]
print("Could not run coverage. Error: %s" % err)
if xml:
if arguments.cover_report:
with open(arguments.cover_report, 'wb') as report:
report.write(xml)
arguments.cover_threshold /= 100.0
reporter.print_coverage(xml, arguments.cover_threshold)
# Write XUnit if necessary
if arguments.xunit_output:
xunit = XUnitReporter(result)
xunit.write_report(arguments.xunit_file)
sys.exit(result.errored_tests)
if __name__ == '__main__':
main()
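# Editor's note: typical command-line invocations, derived from the argparse
# configuration above (illustrative):
#
#   pyvows                          # run *_vows.py files under the current dir
#   pyvows -v -v -v tests/          # increase verbosity
#   pyvows -c -l mypackage tests/   # with coverage for 'mypackage'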
|
MrLoick/python-for-android
|
refs/heads/master
|
python3-alpha/extra_modules/gdata/apps/groups/client.py
|
48
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GroupsClient simplifies Groups Provisioning API calls.
GroupsClient extends gdata.client.GDClient to ease interaction
with the Group Provisioning API. These interactions include the
ability to create, retrieve, update and delete groups.
"""
__author__ = 'Shraddha gupta <[email protected]>'
import urllib.request, urllib.parse, urllib.error
import gdata.apps.groups.data
import gdata.client
# Multidomain URI templates
# The strings in this template are eventually replaced with the API version,
# and Google Apps domain name respectively.
GROUP_URI_TEMPLATE = '/a/feeds/group/%s/%s'
GROUP_MEMBER = 'member'
class GroupsProvisioningClient(gdata.client.GDClient):
"""Client extension for the Google Group Provisioning API service.
Attributes:
host: string The hostname for the Group Provisioning API service.
api_version: string The version of the MultiDomain Provisioning API.
"""
host = 'apps-apis.google.com'
api_version = '2.0'
auth_service = 'apps'
auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
ssl = True
def __init__(self, domain, auth_token=None, **kwargs):
"""Constructs a new client for the Groups Provisioning API.
Args:
domain: string The Google Apps domain with Group Provisioning.
auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
OAuthToken which authorizes this client to edit the email settings.
kwargs: The other parameters to pass to the gdata.client.GDClient
constructor.
"""
gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
self.domain = domain
def make_group_provisioning_uri(
self, feed_type=None, group_id=None, member_id=None, params=None):
"""Creates a resource feed URI for the Groups Provisioning API.
Using this client's Google Apps domain, create a feed URI for group
provisioning in that domain. If an email address is provided, return a
URI for that specific resource. If params are provided, append them as GET
params.
Args:
feed_type: string groupmember for groupmember feed else None
group_id: string (optional) The identifier of group for which to
make a feed URI.
member_id: string (optional) The identifier of group member for which to
make a feed URI.
params: dict (optional) key -> value params to append as GET vars to the
URI. Example: params={'start': 'my-resource-id'}
Returns:
A string giving the URI for group provisioning for this client's
Google Apps domain.
"""
uri = GROUP_URI_TEMPLATE % (self.api_version, self.domain)
if group_id:
uri += '/' + group_id
        if feed_type == GROUP_MEMBER:
uri += '/' + feed_type
if member_id:
uri += '/' + member_id
if params:
uri += '?' + urllib.parse.urlencode(params)
return uri
MakeGroupProvisioningUri = make_group_provisioning_uri
def make_group_member_uri(self, group_id, member_id=None, params=None):
"""Creates a resource feed URI for the Group Member Provisioning API."""
return self.make_group_provisioning_uri(GROUP_MEMBER, group_id=group_id,
member_id=member_id, params=params)
MakeGroupMembersUri = make_group_member_uri
def RetrieveAllPages(self, feed, desired_class=gdata.data.GDFeed):
"""Retrieve all pages and add all elements.
Args:
feed: gdata.data.GDFeed object with linked elements.
desired_class: type of Feed to be returned.
Returns:
desired_class: subclass of gdata.data.GDFeed.
"""
next = feed.GetNextLink()
while next is not None:
next_feed = self.GetFeed(next.href, desired_class=desired_class)
for a_entry in next_feed.entry:
feed.entry.append(a_entry)
next = next_feed.GetNextLink()
return feed
def retrieve_page_of_groups(self, **kwargs):
"""Retrieves first page of groups for the given domain.
Args:
kwargs: The other parameters to pass to gdata.client.GDClient.GetFeed()
Returns:
A gdata.apps.groups.data.GroupFeed of the groups
"""
uri = self.MakeGroupProvisioningUri()
return self.GetFeed(uri,
desired_class=gdata.apps.groups.data.GroupFeed, **kwargs)
RetrievePageOfGroups = retrieve_page_of_groups
def retrieve_all_groups(self):
"""Retrieve all groups in this domain.
Returns:
gdata.apps.groups.data.GroupFeed of the groups
"""
groups_feed = self.RetrievePageOfGroups()
# pagination
return self.RetrieveAllPages(groups_feed, gdata.apps.groups.data.GroupFeed)
RetrieveAllGroups = retrieve_all_groups
def retrieve_group(self, group_id, **kwargs):
"""Retrieves a single group in the domain.
Args:
group_id: string groupId of the group to be retrieved
kwargs: other parameters to pass to gdata.client.GDClient.GetEntry()
Returns:
A gdata.apps.groups.data.GroupEntry representing the group
"""
uri = self.MakeGroupProvisioningUri(group_id=group_id)
return self.GetEntry(uri,
desired_class=gdata.apps.groups.data.GroupEntry, **kwargs)
RetrieveGroup = retrieve_group
def retrieve_page_of_member_groups(self, member_id, direct_only=False,
**kwargs):
"""Retrieve one page of groups that belong to the given member_id.
Args:
member_id: The member's email address (e.g. [email protected]).
direct_only: Boolean whether only return groups that this member
directly belongs to.
Returns:
gdata.apps.groups.data.GroupFeed of the groups.
"""
uri = self.MakeGroupProvisioningUri(params={'member':member_id,
'directOnly':direct_only})
return self.GetFeed(uri,
desired_class=gdata.apps.groups.data.GroupFeed, **kwargs)
RetrievePageOfMemberGroups = retrieve_page_of_member_groups
def retrieve_groups(self, member_id, direct_only=False, **kwargs):
"""Retrieve all groups that belong to the given member_id.
Args:
member_id: The member's email address (e.g. [email protected]).
direct_only: Boolean whether only return groups that this member
directly belongs to.
Returns:
gdata.apps.groups.data.GroupFeed of the groups
"""
groups_feed = self.RetrievePageOfMemberGroups(member_id=member_id,
direct_only=direct_only)
# pagination
return self.RetrieveAllPages(groups_feed, gdata.apps.groups.data.GroupFeed)
RetrieveGroups = retrieve_groups
def create_group(self, group_id, group_name,
description=None, email_permission=None, **kwargs):
"""Creates a group in the domain with the given properties.
Args:
group_id: string identifier of the group.
group_name: string name of the group.
description: string (optional) description of the group.
email_permission: string (optional) email permission level for the group.
kwargs: other parameters to pass to gdata.client.GDClient.post().
Returns:
A gdata.apps.groups.data.GroupEntry of the new group
"""
new_group = gdata.apps.groups.data.GroupEntry(group_id=group_id,
group_name=group_name, description=description,
email_permission=email_permission)
return self.post(new_group, self.MakeGroupProvisioningUri(),
**kwargs)
CreateGroup = create_group
def update_group(self, group_id, group_entry, **kwargs):
"""Updates the group with the given groupID.
Args:
group_id: string identifier of the group.
group_entry: GroupEntry The group entry with updated values.
kwargs: The other parameters to pass to gdata.client.GDClient.put()
Returns:
A gdata.apps.groups.data.GroupEntry representing the group
"""
return self.update(group_entry,
uri=self.MakeGroupProvisioningUri(group_id=group_id),
**kwargs)
UpdateGroup = update_group
def delete_group(self, group_id, **kwargs):
"""Deletes the group with the given groupId.
Args:
group_id: string groupId of the group to delete.
kwargs: The other parameters to pass to gdata.client.GDClient.delete()
"""
self.delete(self.MakeGroupProvisioningUri(group_id=group_id), **kwargs)
DeleteGroup = delete_group
def retrieve_page_of_members(self, group_id, **kwargs):
"""Retrieves first page of group members of the group.
Args:
group_id: string groupId of the group whose members are retrieved
kwargs: The other parameters to pass to gdata.client.GDClient.GetFeed()
Returns:
A gdata.apps.groups.data.GroupMemberFeed of the GroupMember entries
"""
uri = self.MakeGroupMembersUri(group_id=group_id)
return self.GetFeed(uri,
desired_class=gdata.apps.groups.data.GroupMemberFeed, **kwargs)
RetrievePageOfMembers = retrieve_page_of_members
def retrieve_all_members(self, group_id, **kwargs):
"""Retrieve all members of the group.
Returns:
gdata.apps.groups.data.GroupMemberFeed
"""
group_member_feed = self.RetrievePageOfMembers(group_id=group_id)
# pagination
return self.RetrieveAllPages(group_member_feed,
gdata.apps.groups.data.GroupMemberFeed)
RetrieveAllMembers = retrieve_all_members
def retrieve_group_member(self, group_id, member_id, **kwargs):
"""Retrieves a group member with the given id from given group.
Args:
group_id: string groupId of the group whose member is retrieved
member_id: string memberId of the group member retrieved
kwargs: The other parameters to pass to gdata.client.GDClient.GetEntry()
Returns:
A gdata.apps.groups.data.GroupEntry representing the group member
"""
uri = self.MakeGroupMembersUri(group_id=group_id, member_id=member_id)
return self.GetEntry(uri,
desired_class=gdata.apps.groups.data.GroupMemberEntry, **kwargs)
RetrieveGroupMember = retrieve_group_member
def add_member_to_group(self, group_id, member_id, member_type=None,
direct_member=None, **kwargs):
"""Adds a member with the given id to the group.
Args:
group_id: string groupId of the group where member is added
member_id: string memberId of the member added
member_type: string (optional) type of member(user or group)
direct_member: bool (optional) if member is a direct member
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
A gdata.apps.groups.data.GroupMemberEntry of the group member
"""
member = gdata.apps.groups.data.GroupMemberEntry(member_id=member_id,
member_type=member_type, direct_member=direct_member)
return self.post(member, self.MakeGroupMembersUri(group_id=group_id),
**kwargs)
AddMemberToGroup = add_member_to_group
def remove_member_from_group(self, group_id, member_id, **kwargs):
"""Remove member from the given group.
Args:
group_id: string groupId of the group
member_id: string memberId of the member to be removed
kwargs: The other parameters to pass to gdata.client.GDClient.delete()
"""
self.delete(
self.MakeGroupMembersUri(group_id=group_id, member_id=member_id),
**kwargs)
RemoveMemberFromGroup = remove_member_from_group
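# Editor's note: a brief usage sketch (not part of the original module). It
# assumes a valid gdata auth token for the Google Apps provisioning service;
# the domain, group and member values are placeholders.
#
#   client = GroupsProvisioningClient(domain='example.com', auth_token=token)
#   group = client.CreateGroup('devs', 'Developers', 'Dev team', 'Domain')
#   client.AddMemberToGroup('devs', 'alice@example.com')
#   for entry in client.RetrieveAllGroups().entry:
#       print(entry.group_id)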
|
lupyuen/RaspberryPiImage
|
refs/heads/master
|
home/pi/GrovePi/Software/Python/others/temboo/Library/Stripe/Events/ListAllEvents.py
|
5
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ListAllEvents
# Returns a list of events that have happened in your account.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListAllEvents(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListAllEvents Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListAllEvents, self).__init__(temboo_session, '/Library/Stripe/Events/ListAllEvents')
def new_input_set(self):
return ListAllEventsInputSet()
def _make_result_set(self, result, path):
return ListAllEventsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListAllEventsChoreographyExecution(session, exec_id, path)
class ListAllEventsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListAllEvents
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Stripe)
"""
super(ListAllEventsInputSet, self)._set_input('APIKey', value)
def set_Count(self, value):
"""
Set the value of the Count input for this Choreo. ((optional, integer) A limit on the number of events to be returned. Count can range between 1 and 100 items.)
"""
super(ListAllEventsInputSet, self)._set_input('Count', value)
def set_Created(self, value):
"""
Set the value of the Created input for this Choreo. ((optional, date) Filters the result based on the event created date (a UTC timestamp).)
"""
super(ListAllEventsInputSet, self)._set_input('Created', value)
def set_GreaterThanEqualTo(self, value):
"""
Set the value of the GreaterThanEqualTo input for this Choreo. ((optional, date) Returns events that have been created after or equal to this UTC timestamp.)
"""
super(ListAllEventsInputSet, self)._set_input('GreaterThanEqualTo', value)
def set_GreaterThan(self, value):
"""
Set the value of the GreaterThan input for this Choreo. ((optional, date) Returns events that have been created after this UTC timestamp.)
"""
super(ListAllEventsInputSet, self)._set_input('GreaterThan', value)
def set_LessThanEqualTo(self, value):
"""
Set the value of the LessThanEqualTo input for this Choreo. ((optional, date) Return events that were created before or equal to this UTC timestamp.)
"""
super(ListAllEventsInputSet, self)._set_input('LessThanEqualTo', value)
def set_LessThan(self, value):
"""
Set the value of the LessThan input for this Choreo. ((optional, date) Return events that were created before this UTC timestamp.)
"""
super(ListAllEventsInputSet, self)._set_input('LessThan', value)
def set_Offset(self, value):
"""
Set the value of the Offset input for this Choreo. ((optional, integer) An offset into your events array. The API will return the requested number of events starting at that offset.)
"""
super(ListAllEventsInputSet, self)._set_input('Offset', value)
def set_Type(self, value):
"""
Set the value of the Type input for this Choreo. ((optional, string) A string containing a specific event name, or group of events using * as a wildcard. This will return a filtered result including only events with a matching event property.)
"""
super(ListAllEventsInputSet, self)._set_input('Type', value)
class ListAllEventsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListAllEvents Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Stripe)
"""
return self._output.get('Response', None)
class ListAllEventsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListAllEventsResultSet(response, path)
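# Editor's note: an illustrative sketch of executing this Choreo (not part of
# the original module). It assumes a TembooSession created elsewhere with
# valid credentials; the API key value is a placeholder.
#
#   choreo = ListAllEvents(session)
#   inputs = choreo.new_input_set()
#   inputs.set_APIKey('sk_test_...')
#   inputs.set_Count('10')
#   results = choreo.execute_with_results(inputs)
#   events = results.getJSONFromString(results.get_Response())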
|
Lindem-Data-Acquisition-AS/TM4C129-discontinued
|
refs/heads/master
|
libraries/FreeRTOSv8.0.1/FreeRTOS-Plus/Source/CyaSSL/swig/runme.py
|
13
|
# file: runme.py
import cyassl
print ""
print "Trying to connect to the echo server..."
cyassl.CyaSSL_Init()
#cyassl.CyaSSL_Debugging_ON()
ctx = cyassl.CyaSSL_CTX_new(cyassl.CyaTLSv1_client_method())
if ctx == None:
print "Couldn't get SSL CTX for TLSv1"
exit(-1)
ret = cyassl.CyaSSL_CTX_load_verify_locations(ctx, "../certs/ca-cert.pem", None)
if ret != cyassl.SSL_SUCCESS:
print "Couldn't do SSL_CTX_load_verify_locations "
print "error string = ", ret
exit(-1)
ssl = cyassl.CyaSSL_new(ctx)
ret = cyassl.CyaSSL_swig_connect(ssl, "localhost", 11111)
if ret != cyassl.SSL_SUCCESS:
print "Couldn't do SSL connect"
err = cyassl.CyaSSL_get_error(ssl, 0)
print "error string = ", cyassl.CyaSSL_error_string(err)
exit(-1)
print "...Connected"
written = cyassl.CyaSSL_write(ssl, "hello from python\r\n", 19)
if written > 0:
print "Wrote ", written, " bytes"
byteArray = cyassl.byteArray(100)
readBytes = cyassl.CyaSSL_read(ssl, byteArray, 100)
print "server reply: ", cyassl.cdata(byteArray, readBytes)
|
jat255/hyperspy
|
refs/heads/RELEASE_next_minor
|
hyperspy/utils/peakfinders2D.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import scipy.ndimage as ndi
from numba import jit
from skimage.feature import (
peak_local_max, blob_dog, blob_log, corner_peaks, match_template)
import copy
NO_PEAKS = np.array([[np.nan, np.nan]])
@jit(nopython=True, cache=True)
def _fast_mean(X):
"""JIT-compiled mean of array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
mean : float
Mean of X.
Notes
-----
Used by scipy.ndimage.generic_filter in the find_peaks_stat
method to reduce overhead of repeated Python function calls.
See https://github.com/scipy/scipy/issues/8916 for more details.
"""
return np.mean(X)
@jit(nopython=True, cache=True)
def _fast_std(X):
"""JIT-compiled standard deviation of array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
std : float
Standard deviation of X.
Notes
-----
Used by scipy.ndimage.generic_filter in the find_peaks_stat
method to reduce overhead of repeated Python function calls.
See https://github.com/scipy/scipy/issues/8916 for more details.
"""
return np.std(X)
def clean_peaks(peaks):
"""Sort array of peaks and deal with no peaks being found.
Parameters
----------
peaks : numpy.ndarray
Array of found peaks.
Returns
-------
peaks : numpy.ndarray
Sorted array, first by `peaks[:,1]` (y-coordinate) then by `peaks[:,0]`
(x-coordinate), of found peaks.
    NO_PEAKS : numpy.ndarray
        Flag value indicating that no peaks were found.
"""
if len(peaks) == 0:
return NO_PEAKS
else:
ind = np.lexsort((peaks[:,0], peaks[:,1]))
return peaks[ind]
def find_local_max(z, **kwargs):
"""Method to locate positive peaks in an image by local maximum searching.
    This function wraps the :py:func:`skimage.feature.peak_local_max` function
    and sorts the results for consistency with other peak finding methods.
    Parameters
    ----------
    z : numpy.ndarray
Array of image intensities.
**kwargs
Keyword arguments to be passed to the ``peak_local_max`` method of
the ``scikit-image`` library. See its documentation for details:
http://scikit-image.org/docs/dev/api/skimage.feature.html#peak-local-max
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
"""
peaks = peak_local_max(z, **kwargs)
return clean_peaks(peaks)
def find_peaks_minmax(z, distance=5., threshold=10.):
"""Method to locate the positive peaks in an image by comparing maximum
and minimum filtered images.
Parameters
----------
z : numpy.ndarray
Matrix of image intensities.
distance : float
Expected distance between peaks.
threshold : float
Minimum difference between maximum and minimum filtered images.
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
"""
data_max = ndi.filters.maximum_filter(z, distance)
maxima = (z == data_max)
data_min = ndi.filters.minimum_filter(z, distance)
diff = ((data_max - data_min) > threshold)
maxima[diff == 0] = 0
labeled, num_objects = ndi.label(maxima)
peaks = np.array(
ndi.center_of_mass(z, labeled, range(1, num_objects + 1)))
return clean_peaks(np.round(peaks).astype(int))
def find_peaks_max(z, alpha=3., distance=10):
"""Method to locate positive peaks in an image by local maximum searching.
Parameters
----------
    z : numpy.ndarray
        Matrix of image intensities.
    alpha : float
Only maxima above `alpha * sigma` are found, where `sigma` is the
standard deviation of the image.
distance : int
When a peak is found, all pixels in a square region of side
`2 * distance` are set to zero so that no further peaks can be found
in that region.
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
"""
# preallocate lots of peak storage
k_arr = []
# copy image
image_temp = copy.deepcopy(z)
peak_ct = 0
# calculate standard deviation of image for thresholding
sigma = np.std(z)
while True:
k = np.argmax(image_temp)
j, i = np.unravel_index(k, image_temp.shape)
if image_temp[j, i] >= alpha * sigma:
k_arr.append([j, i])
# masks peaks already identified.
x = np.arange(i - distance, i + distance)
y = np.arange(j - distance, j + distance)
xv, yv = np.meshgrid(x, y)
# clip to handle peaks near image edge
image_temp[yv.clip(0, image_temp.shape[0] - 1),
xv.clip(0, image_temp.shape[1] - 1)] = 0
peak_ct += 1
else:
break
peaks = np.array(k_arr)
return clean_peaks(peaks)
def find_peaks_zaefferer(z, grad_threshold=0.1, window_size=40,
distance_cutoff=50.):
"""Method to locate positive peaks in an image based on gradient
thresholding and subsequent refinement within masked regions.
Parameters
----------
z : ndarray
Matrix of image intensities.
grad_threshold : float
The minimum gradient required to begin a peak search.
window_size : int
The size of the square window within which a peak search is
conducted. If odd, will round down to even. The size must be larger
than 2.
distance_cutoff : float
The maximum distance a peak may be from the initial
high-gradient point.
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
Notes
-----
Implemented as described in Zaefferer "New developments of computer-aided
crystallographic analysis in transmission electron microscopy" J. Ap. Cryst.
This version by Ben Martineau (2016)
"""
def box(x, y, window_size, x_max, y_max):
"""Produces a list of coordinates in the box about (x, y)."""
a = int(window_size / 2)
x_min = max(0, x - a)
x_max = min(x_max, x + a)
y_min = max(0, y - a)
y_max = min(y_max, y + a)
return np.mgrid[x_min:x_max, y_min:y_max].reshape(2, -1, order="F")
def get_max(image, box):
"""Finds the coordinates of the maximum of 'image' in 'box'."""
vals = image[tuple(box)]
ind = np.argmax(vals)
return tuple(box[:, ind])
def squared_distance(x, y):
"""Calculates the squared distance between two points."""
return (x[0] - y[0]) ** 2 + (x[1] - y[1]) ** 2
def gradient(image):
"""Calculates the square of the 2-d partial gradient.
Parameters
----------
image : numpy.ndarray
The image for which the gradient will be calculated.
Returns
-------
gradient_of_image : numpy.ndarray
The gradient of the image.
"""
gradient_of_image = np.gradient(image)
gradient_of_image = gradient_of_image[0] ** 2 + gradient_of_image[
1] ** 2
return gradient_of_image
# Check window size is appropriate.
if window_size < 2:
raise ValueError("`window_size` must be >= 2.")
# Generate an ordered list of matrix coordinates.
if len(z.shape) != 2:
raise ValueError("'z' should be a 2-d image matrix.")
z = z / np.max(z)
coordinates = np.indices(z.data.shape).reshape(2, -1).T
# Calculate the gradient at every point.
image_gradient = gradient(z)
# Boolean matrix of high-gradient points.
coordinates = coordinates[(image_gradient >= grad_threshold).flatten()]
# Compare against squared distance (avoids repeated sqrt calls)
distance_cutoff_sq = distance_cutoff ** 2
peaks = []
for coordinate in coordinates:
# Iterate over coordinates where the gradient is high enough.
b = box(coordinate[0], coordinate[1], window_size, z.shape[0],
z.shape[1])
p_old = (0, 0)
p_new = get_max(z, b)
while p_old[0] != p_new[0] and p_old[1] != p_new[1]:
p_old = p_new
b = box(p_old[0], p_old[1], window_size, z.shape[0], z.shape[1])
p_new = get_max(z, b)
if squared_distance(coordinate, p_new) > distance_cutoff_sq:
break
peaks.append(p_new)
peaks = np.array([p for p in set(peaks)])
return clean_peaks(peaks)
def find_peaks_stat(z, alpha=1.0, window_radius=10, convergence_ratio=0.05):
"""Method to locate positive peaks in an image based on statistical
refinement and difference with respect to mean intensity.
Parameters
----------
z : numpy.ndarray
Array of image intensities.
alpha : float
Only maxima above `alpha * sigma` are found, where `sigma` is the
local, rolling standard deviation of the image.
window_radius : int
The pixel radius of the circular window for the calculation of the
rolling mean and standard deviation.
convergence_ratio : float
The algorithm will stop finding peaks when the proportion of new peaks
being found is less than `convergence_ratio`.
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
Notes
-----
Implemented as described in the PhD thesis of Thomas White, University of
Cambridge, 2009, with minor modifications to resolve ambiguities.
The algorithm is as follows:
1. Adjust the contrast and intensity bias of the image so that all pixels
have values between 0 and 1.
2. For each pixel, determine the mean and standard deviation of all pixels
inside a circle of radius 10 pixels centered on that pixel.
3. If the value of the pixel is greater than the mean of the pixels in the
circle by more than one standard deviation, set that pixel to have an
intensity of 1. Otherwise, set the intensity to 0.
    4. Smooth the image by convolving it twice with a flat 3x3 kernel.
5. Let k = (1/2 - mu)/sigma where mu and sigma are the mean and standard
deviations of all the pixel intensities in the image.
6. For each pixel in the image, if the value of the pixel is greater than
mu + k*sigma set that pixel to have an intensity of 1. Otherwise, set the
intensity to 0.
7. Detect peaks in the image by locating the centers of gravity of regions
of adjacent pixels with a value of 1.
8. Repeat #4-7 until the number of peaks found in the previous step
converges to within the user defined convergence_ratio.
"""
try:
from sklearn.cluster import DBSCAN
except ImportError:
raise ImportError("This method requires scikit-learn.")
def normalize(image):
"""Scales the image to intensities between 0 and 1."""
return image / np.max(image)
def _local_stat(image, radius, func):
"""Calculates rolling method 'func' over a circular kernel."""
x, y = np.ogrid[-radius : radius + 1, -radius : radius + 1]
kernel = np.hypot(x, y) < radius
stat = ndi.filters.generic_filter(image, func, footprint=kernel)
return stat
def local_mean(image, radius):
"""Calculates rolling mean over a circular kernel."""
return _local_stat(image, radius, _fast_mean)
def local_std(image, radius):
"""Calculates rolling standard deviation over a circular kernel."""
return _local_stat(image, radius, _fast_std)
def single_pixel_desensitize(image):
"""Reduces single-pixel anomalies by nearest-neighbor smoothing."""
kernel = np.array([[0.5, 1, 0.5], [1, 1, 1], [0.5, 1, 0.5]])
smoothed_image = ndi.filters.generic_filter(image, _fast_mean, footprint=kernel)
return smoothed_image
def stat_binarise(image):
"""Peaks more than one standard deviation from the mean set to one."""
image_rolling_mean = local_mean(image, window_radius)
image_rolling_std = local_std(image, window_radius)
image = single_pixel_desensitize(image)
binarised_image = np.zeros(image.shape)
stat_mask = image > (image_rolling_mean + alpha * image_rolling_std)
binarised_image[stat_mask] = 1
return binarised_image
def smooth(image):
"""Image convolved twice using a uniform 3x3 kernel."""
image = ndi.filters.uniform_filter(image, size=3)
image = ndi.filters.uniform_filter(image, size=3)
return image
def half_binarise(image):
"""Image binarised about values of one-half intensity."""
binarised_image = np.where(image > 0.5, 1, 0)
return binarised_image
def separate_peaks(binarised_image):
"""Identify adjacent 'on' coordinates via DBSCAN."""
bi = binarised_image.astype("bool")
coordinates = np.indices(bi.shape).reshape(2, -1).T[bi.flatten()]
db = DBSCAN(2, 3)
peaks = []
if coordinates.shape[0] > 0: # we have at least some peaks
labeled_points = db.fit_predict(coordinates)
for peak_label in list(set(labeled_points)):
peaks.append(coordinates[labeled_points == peak_label])
return peaks
def _peak_find_once(image):
"""Smooth, binarise, and find peaks according to main algorithm."""
image = smooth(image) # 4
image = half_binarise(image) # 5
peaks = separate_peaks(image) # 6
centers = np.array([np.mean(peak, axis=0) for peak in peaks]) # 7
return image, centers
def stat_peak_finder(image, convergence_ratio):
"""Find peaks in image. Algorithm stages in comments."""
# Image preparation
image = normalize(image) # 1
image = stat_binarise(image) # 2, 3
# Perform first iteration of peak finding
image, peaks_curr = _peak_find_once(image) # 4-7
n_peaks = len(peaks_curr)
if n_peaks == 0:
return peaks_curr
m_peaks = 0
# Repeat peak finding with more blurring to convergence
while (n_peaks - m_peaks) / n_peaks > convergence_ratio: # 8
m_peaks = n_peaks
peaks_old = np.copy(peaks_curr)
image, peaks_curr = _peak_find_once(image)
n_peaks = len(peaks_curr)
if n_peaks == 0:
return peaks_old
return peaks_curr
return clean_peaks(stat_peak_finder(z, convergence_ratio))
def find_peaks_dog(z, min_sigma=1., max_sigma=50., sigma_ratio=1.6,
threshold=0.2, overlap=0.5, exclude_border=False):
"""Method to locate peaks via the Difference of Gaussian Matrices method.
    This function wraps the :py:func:`skimage.feature.blob_dog` function and
    sorts the results for consistency with other peak finding methods.
Parameters
----------
z : numpy.ndarray
2-d array of intensities
min_sigma, max_sigma, sigma_ratio, threshold, overlap, exclude_border :
Additional parameters to be passed to the algorithm. See `blob_dog`
documentation for details:
http://scikit-image.org/docs/dev/api/skimage.feature.html#blob-dog
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
Notes
-----
While highly effective at finding even very faint peaks, this method is
sensitive to fluctuations in intensity near the edges of the image.
"""
z = z / np.max(z)
blobs = blob_dog(z, min_sigma=min_sigma, max_sigma=max_sigma,
sigma_ratio=sigma_ratio, threshold=threshold,
overlap=overlap, exclude_border=exclude_border)
try:
centers = np.round(blobs[:, :2]).astype(int)
except IndexError:
return NO_PEAKS
clean_centers = []
for center in centers:
if len(np.intersect1d(center, (0, 1) + z.shape + tuple(
c - 1 for c in z.shape))) > 0:
continue
clean_centers.append(center)
peaks = np.array(clean_centers)
ind = np.lexsort((peaks[:,0], peaks[:,1]))
return peaks[ind]
def find_peaks_log(z, min_sigma=1., max_sigma=50., num_sigma=10,
threshold=0.2, overlap=0.5, log_scale=False,
exclude_border=False):
"""Method to locate peaks via the Laplacian of Gaussian Matrices method.
    This function wraps the :py:func:`skimage.feature.blob_log` function and
    sorts the results for consistency with other peak finding methods.
Parameters
----------
z : numpy.ndarray
Array of image intensities.
min_sigma, max_sigma, num_sigma, threshold, overlap, log_scale, exclude_border :
Additional parameters to be passed to the ``blob_log`` method of the
``scikit-image`` library. See its documentation for details:
http://scikit-image.org/docs/dev/api/skimage.feature.html#blob-log
Returns
-------
peaks : numpy.ndarray
(n_peaks, 2)
Peak pixel coordinates.
"""
z = z / np.max(z)
if isinstance(num_sigma, float):
raise ValueError("`num_sigma` parameter should be an integer.")
blobs = blob_log(z, min_sigma=min_sigma, max_sigma=max_sigma,
num_sigma=num_sigma, threshold=threshold, overlap=overlap,
log_scale=log_scale, exclude_border=exclude_border)
# Attempt to return only peak positions. If no peaks exist, return an
# empty array.
try:
centers = np.round(blobs[:, :2]).astype(int)
ind = np.lexsort((centers[:,0], centers[:,1]))
except IndexError:
return NO_PEAKS
return centers[ind]
def find_peaks_xc(z, template, distance=5, threshold=0.5, **kwargs):
"""Find peaks in the cross correlation between the image and a template by
using the :py:func:`~hyperspy.utils.peakfinders2D.find_peaks_minmax` function
to find the peaks on the cross correlation result obtained using the
:py:func:`skimage.feature.match_template` function.
Parameters
----------
z : numpy.ndarray
Array of image intensities.
template : numpy.ndarray (square)
Array containing a single bright disc, similar to those to detect.
distance : float
Expected distance between peaks.
threshold : float
Minimum difference between maximum and minimum filtered images.
**kwargs : dict
Keyword arguments to be passed to the
:py:func:`skimage.feature.match_template` function.
Returns
-------
numpy.ndarray
(n_peaks, 2)
Array of peak coordinates.
"""
pad_input = kwargs.pop('pad_input', True)
response_image = match_template(z, template, pad_input=pad_input, **kwargs)
peaks = find_peaks_minmax(response_image,
distance=distance,
threshold=threshold)
return clean_peaks(peaks)
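# Editor's note: a minimal, self-contained demonstration of two of the peak
# finders above on a synthetic image (an editor's sketch, not part of the
# original module).
if __name__ == '__main__':
    # Two Gaussian spots (sigma = 3) on a flat background.
    yy, xx = np.mgrid[0:128, 0:128]
    image = (np.exp(-((xx - 32) ** 2 + (yy - 32) ** 2) / 18.0) +
             np.exp(-((xx - 96) ** 2 + (yy - 80) ** 2) / 18.0))
    print(find_peaks_minmax(image, distance=10, threshold=0.1))
    print(find_peaks_dog(image, min_sigma=2., max_sigma=8., threshold=0.05))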
|
sunzhxjs/JobGIS
|
refs/heads/master
|
lib/python2.7/site-packages/setuptools/launch.py
|
50
|
"""
Launch the Python script on the command line after
setuptools is bootstrapped via import.
"""
# Note that setuptools gets imported implicitly by the
# invocation of this script using python -m setuptools.launch
import tokenize
import sys
def run():
"""
Run the script in sys.argv[1] as if it had
been invoked naturally.
"""
__builtins__
script_name = sys.argv[1]
namespace = dict(
__file__ = script_name,
__name__ = '__main__',
__doc__ = None,
)
sys.argv[:] = sys.argv[1:]
open_ = getattr(tokenize, 'open', open)
script = open_(script_name).read()
norm_script = script.replace('\\r\\n', '\\n')
code = compile(norm_script, script_name, 'exec')
exec(code, namespace)
if __name__ == '__main__':
run()
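# Editor's note: illustrative invocation (not part of the original module):
#
#   python -m setuptools.launch path/to/setup.py sdist
#
# This runs the target script with setuptools already imported, so scripts
# that only import distutils still get setuptools-patched behavior.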
|
alanplotko/CoRE-Manager
|
refs/heads/master
|
app.py
|
2
|
# Flask
from flask import Flask, render_template, request, redirect, url_for, session, abort, make_response
# Authentication
from authomatic.adapters import WerkzeugAdapter
from authomatic import Authomatic
from config import CONFIG
# MongoDB and Sessions
from flask.ext.session import Session
from pymongo import MongoClient
from functools import wraps
from datetime import datetime
from time import time
# Miscellaneous
import os, logging, json, sys
tmpl_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')
# MongoDB Setup
client = MongoClient(os.getenv('COREDASH_MONGOHQ_URL'))
db = client.core
# MongoDB Session Setup
SESSION_TYPE = 'mongodb'
SESSION_MONGODB = client
SESSION_MONGODB_DB = os.getenv('COREDASH_MONGOHQ_DB')
SESSION_MONGODB_COLLECT = os.getenv('COREDASH_MONGOHQ_SESSIONS')
SESSION_USE_SIGNER = True
SESSION_KEY_PREFIX = os.getenv('COREDASH_MONGOHQ_PREFIX')
# Instantiate Authomatic Object and set up app
app = Flask(__name__)
app.secret_key = os.getenv('COREDASH_APP_SECRET')
authomatic = Authomatic(config=CONFIG, secret=app.secret_key)
app.config.from_object(__name__)
Session(app)
@app.before_first_request
def setup_logging():
if not app.debug:
# In production mode, add log handler to sys.stderr.
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def getCredentials():
credentials = session.get('credentials', None)
if credentials:
credentials = authomatic.credentials(credentials)
return credentials
return None
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
credentials = getCredentials()
if not credentials or not credentials.valid:
return redirect(url_for('login', next=request.url))
# If credentials are valid and expire in 30 minutes, refresh
elif credentials and credentials.valid and credentials.expire_soon(30 * 60):
response = credentials.refresh()
return f(*args, **kwargs)
return decorated_function
@app.route('/')
def index():
credentials = getCredentials()
if credentials and credentials.valid:
return redirect(url_for('dashboard'))
return render_template('index.html', template_folder=tmpl_dir)
@app.route('/login')
def login():
credentials = getCredentials()
if credentials and credentials.valid:
return redirect(url_for('dashboard'))
return render_template('login.html', template_folder=tmpl_dir, credentials=credentials)
@app.route('/oauth2callback', methods=['GET', 'POST'])
def authenticate():
# We need response object for the WerkzeugAdapter.
response = make_response()
# Log the user in, pass it the adapter and the provider name.
result = authomatic.login(
WerkzeugAdapter(request, response),
"google",
session=session,
        session_saver=lambda: app.save_session(session, response)
)
# If there is no LoginResult object, the login procedure is still pending
if result:
if result.user:
# We need to update the user to get more info
result.user.update()
# Store authomatic credentials in session
session['credentials'] = authomatic.credentials(result.user.credentials).serialize()
# Create new account if user is not found
account = db.users.find_one({'email': result.user.email })
            if account is None:
del session['credentials']
return make_response(render_template('error.html', template_folder=tmpl_dir, error=401, error_msg="Unauthorized",
return_home="We couldn't find you on the CoRE member list. You must be a CoRE member to access \
CoREdash. Check with the secretary if you believe this is a mistake."), 401)
else:
# Store user information in session
session['username'] = result.user.email
if account.get('name') is None:
db.users.update({ 'email': result.user.email }, { '$set': { 'name': result.user.name } }, upsert=False)
session['display_name'] = result.user.name.split(' ')[0]
credentials = getCredentials()
return render_template('process_login.html')
# Don't forget to return the response
return response
@app.route('/logout')
def logout():
credentials = getCredentials()
if credentials and credentials.valid:
db.sessions.remove({ "id": app.config.get('SESSION_KEY_PREFIX') + session.sid })
session.clear()
return redirect(url_for('index'))
@app.route('/dashboard')
@login_required
def dashboard():
credentials = getCredentials()
return render_template('dashboard.html', template_folder=tmpl_dir, credentials=credentials)
@app.errorhandler(401)
def unauthorized(error):
return render_template('error.html', template_folder=tmpl_dir, error=401, error_msg="Unauthorized",
return_home="You must be a CoRE member to access this page!"
)
@app.errorhandler(500)
def internal_server(e):
return render_template('error.html', template_folder=tmpl_dir, error=500, error_msg="Internal Server Error",
return_home="The gears must have gotten stuck. Let us know if it happens again!"
)
@app.errorhandler(404)
def page_not_found(e):
return render_template('error.html', template_folder=tmpl_dir, error=404, error_msg="Page Not Found",
return_home="We can't find what you're looking for."
)
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
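# Editor's note: a sketch of the environment this app expects (not part of
# the original module); the values shown are placeholders:
#
#   export COREDASH_APP_SECRET='change-me'
#   export COREDASH_MONGOHQ_URL='mongodb://localhost:27017'
#   export COREDASH_MONGOHQ_DB='core'
#   export COREDASH_MONGOHQ_SESSIONS='sessions'
#   export COREDASH_MONGOHQ_PREFIX='session:'
#   python app.py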
|
dkubiak789/OpenUpgrade
|
refs/heads/8.0
|
addons/document/odt2txt.py
|
435
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys, zipfile, xml.dom.minidom
import StringIO
class OpenDocumentTextFile :
def __init__ (self, filepath):
zip = zipfile.ZipFile(filepath)
self.content = xml.dom.minidom.parseString(zip.read("content.xml"))
def toString (self):
""" Converts the document to a string. """
buffer = u""
for val in ["text:p", "text:h", "text:list"]:
for paragraph in self.content.getElementsByTagName(val) :
buffer += self.textToString(paragraph) + "\n"
return buffer
def textToString(self, element):
buffer = u""
for node in element.childNodes:
if node.nodeType == xml.dom.Node.TEXT_NODE:
buffer += node.nodeValue
elif node.nodeType == xml.dom.Node.ELEMENT_NODE:
buffer += self.textToString(node)
return buffer
if __name__ == "__main__":
s = StringIO.StringIO(file(sys.argv[1]).read())
odt = OpenDocumentTextFile(s)
print odt.toString().encode('ascii','replace')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kseistrup/qtile
|
refs/heads/develop
|
test/layouts/layout_utils.py
|
11
|
# Copyright (c) 2011 Florian Mounier
# Copyright (c) 2012, 2014-2015 Tycho Andersen
# Copyright (c) 2013 Mattias Svala
# Copyright (c) 2013 Craig Barnes
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
# Copyright (c) 2014 Chris Wesseling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
def assertFocused(self, name):
"""Asserts that window with specified name is currently focused"""
info = self.c.window.info()
assert info['name'] == name, 'Got {0!r}, expected {1!r}'.format(
info['name'], name)
def assertDimensions(self, x, y, w, h, win=None):
"""Asserts dimensions of window"""
if win is None:
win = self.c.window
info = win.info()
assert info['x'] == x, info
assert info['y'] == y, info
assert info['width'] == w, info # why?
assert info['height'] == h, info
def assertFocusPath(self, *names):
for i in names:
self.c.group.next_window()
assertFocused(self, i)
# let's check twice for sure
for i in names:
self.c.group.next_window()
assertFocused(self, i)
# Ok, let's check backwards now
for i in reversed(names):
assertFocused(self, i)
self.c.group.prev_window()
# and twice for sure
for i in reversed(names):
assertFocused(self, i)
self.c.group.prev_window()
|
Ayub-Khan/edx-platform
|
refs/heads/master
|
common/lib/chem/chem/miller.py
|
182
|
""" Calculation of Miller indices """
import numpy as np
import math
import fractions as fr
import decimal
import json
def lcm(a, b):
"""
Returns the least common multiple of a and b.
Args:
a, b: ints (here, fraction denominators)
Returns:
int
"""
return a * b / fr.gcd(a, b)
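# Illustrative check (hypothetical values): lcm(4, 6) -> 12,
# since 4 * 6 / gcd(4, 6) = 24 / 2.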
def segment_to_fraction(distance):
"""
Converts a length that the plane cuts on an axis to an inverted fraction.
Tries to convert the distance to the closest nice fraction with a
denominator of at most 10. This is purely for simplicity and clarity of
learning purposes. Jenny: 'In typical courses students usually do not
encounter indices any higher than 6'.
If the distance is not a number (numpy nan), the plane is parallel to the
axis or contains it. The inverse of nan (nan is 1/0), namely 0 / 1, is
returned.
Special cases:
a) If the distance is smaller than some constant, e.g. 0.01011, the
fraction's denominator is usually much greater than 10.
b) If a student places a point at 0.66, this yields the (3,3,3) plane. But
a slight mouse move to 0.65 would give the (16,15,16) plane. That is why
point coordinates are adjusted to the closest tick or tick + tick / 2
value, and the UI now sends the server only values that are multiples of
0.05 (half a tick). The same rounding is implemented in the unit tests.
If one wants to calculate Miller indices from exact coordinates and still
obtain nice fractions (which produce small Miller indices), one may shift
to a new origin when the segments are like S = (0.015, >0.05, >0.05),
i.e. close to zero in one coordinate: update S to (0, >0.05, >0.05) and
shift the origin to obtain small fractions. There is also a degenerate
case: for S = (0.015, 0.012, >0.05), updating S to (0, 0, >0.05) yields a
line, which has to be handled separately. Small nice Miller numbers and
the ability to create very small segments cannot both be supported at the
same time.
Args:
distance: float distance that the plane cuts on an axis; it must not be
0. The distance is a multiple of 0.05.
Returns:
Inverted fraction;
0 / 1 if distance is nan.
"""
if np.isnan(distance):
return fr.Fraction(0, 1)
else:
fract = fr.Fraction(distance).limit_denominator(10)
return fr.Fraction(fract.denominator, fract.numerator)
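# Illustrative checks (hypothetical inputs; distances arrive as multiples
# of 0.05, and nan marks an axis-parallel plane):
#   segment_to_fraction(0.5)    -> Fraction(2, 1)   (inverse of 1/2)
#   segment_to_fraction(np.nan) -> Fraction(0, 1)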
def sub_miller(segments):
'''
Calculates Miller indices from segments.
Algorithm:
1. Obtain inverted fractions from the segments.
2. Find the common denominator of the inverted fractions.
3. Bring the fractions to the common denominator and drop the denominator.
4. Return the obtained values.
Args:
List of 3 floats, the distances that the plane cuts on the x, y, z axes.
No float equals zero, i.e. the plane does not intersect the origin (any
needed origin shift has already been done).
Returns:
String that represents the Miller indices, e.g. (-6,3,-6) or (2,2,2)
'''
fracts = [segment_to_fraction(segment) for segment in segments]
common_denominator = reduce(lcm, [fract.denominator for fract in fracts])
miller_indices = ([
fract.numerator * math.fabs(common_denominator) / fract.denominator
for fract in fracts
])
return '(' + ','.join(map(str, map(decimal.Decimal, miller_indices))) + ')'
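# Worked example (hypothetical segments; nan marks an axis the plane
# never cuts):
#   sub_miller([0.5, 1.0, np.nan])
#   inverted fractions 2/1, 1/1, 0/1 -> common denominator 1
#   -> '(2,1,0)'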
def miller(points):
"""
Calculates Miller indices from points.
Algorithm:
1. Calculate the normal vector of the plane that goes through all points.
2. Set the origin.
3. Create a Cartesian coordinate system (Ccs).
4. Find the lengths of the segments that the plane cuts on the axes.
Equation of a line for an axis: Origin + (Coordinate_vector - Origin) * parameter.
5. If the plane goes through the origin:
a) Find a new origin: a unit cube vertex not crossed by the plane.
b) Repeat steps 2-4.
c) Fix the signs of the segments after the origin shift, taking the
original directions of the axes into account. E.g.: the origin was 0,0,0
and became new_origin. If new_origin has the same Y coordinate as the old
origin, the segment keeps its sign. But if new_origin has a different Y
coordinate (was 0, became 1), the segment changes sign (it now lies on
the negative side of the Y axis). A new_origin coordinate of 0 in X, Y or
Z means the segment keeps its sign; a value of 1 flips it. So the new
sign is (1 - 2 * new_origin): 0 -> 1, 1 -> -1.
6. Run the function that calculates Miller indices from the segments.
Args:
List of points, where each point is a list of float coordinates ordered
x, y, z. The points must be distinct.
Returns:
String that represents Miller indices, e.g: (-6,3,-6) or (2,2,2)
"""
N = np.cross(points[1] - points[0], points[2] - points[0])
O = np.array([0, 0, 0])
P = points[0] # point of plane
Ccs = map(np.array, [[1.0, 0, 0], [0, 1.0, 0], [0, 0, 1.0]])
segments = ([
np.dot(P - O, N) / np.dot(ort, N) if np.dot(ort, N) != 0
else np.nan for ort in Ccs
])
if any(x == 0 for x in segments): # Plane goes through origin.
vertices = [
# top:
np.array([1.0, 1.0, 1.0]),
np.array([0.0, 0.0, 1.0]),
np.array([1.0, 0.0, 1.0]),
np.array([0.0, 1.0, 1.0]),
# bottom, except 0,0,0:
np.array([1.0, 0.0, 0.0]),
np.array([0.0, 1.0, 0.0]),
np.array([1.0, 1.0, 1.0]),
]
for vertex in vertices:
if np.dot(vertex - O, N) != 0: # vertex not in plane
new_origin = vertex
break
# obtain new axes with center in new origin
X = np.array([1 - new_origin[0], new_origin[1], new_origin[2]])
Y = np.array([new_origin[0], 1 - new_origin[1], new_origin[2]])
Z = np.array([new_origin[0], new_origin[1], 1 - new_origin[2]])
new_Ccs = [X - new_origin, Y - new_origin, Z - new_origin]
segments = ([np.dot(P - new_origin, N) / np.dot(ort, N) if
np.dot(ort, N) != 0 else np.nan for ort in new_Ccs])
# fix signs of segments after origin shift: 0 -> 1, 1 -> -1
segments = (1 - 2 * new_origin) * segments
return sub_miller(segments)
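# Worked example (hypothetical points on the plane x = 0.5, which is
# parallel to the y and z axes):
#   miller([np.array([0.5, 0, 0]),
#           np.array([0.5, 1, 0]),
#           np.array([0.5, 0, 1])])
#   normal N = (1, 0, 0), segments [0.5, nan, nan] -> '(2,0,0)'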
def grade(user_input, correct_answer):
'''
Grades a crystallography problem.
Returns True if the lattices are the same and the Miller indices are equal
or equal up to sign, e.g. (2,2,2) matches (2, 2, 2) or (-2, -2, -2); the
sign depends only on the student's choice of origin.
Args:
user_input, correct_answer: json. Format:
user_input: {"lattice":"sc","points":[["0.77","0.00","1.00"],
["0.78","1.00","0.00"],["0.00","1.00","0.72"]]}
correct_answer: {'miller': '(00-1)', 'lattice': 'bcc'}
"lattice" is one of: "", "sc", "bcc", "fcc"
Returns:
True or False.
'''
def negative(m):
"""
Changes the sign of Miller indices.
Args:
m: string representing Miller indices, e.g.:
(-6,3,-6) -> (6, -3, 6)
Returns:
String with changed signs.
"""
output = ''
i = 1
while i in range(1, len(m) - 1):
if m[i] in (',', ' '):
output += m[i]
elif m[i] not in ('-', '0'):
output += '-' + m[i]
elif m[i] == '0':
output += m[i]
else:
i += 1
output += m[i]
i += 1
return '(' + output + ')'
def round0_25(point):
"""
Rounds point coordinates to the closest 0.05 value.
Args:
point: list of float coordinates, ordered x, y, z.
Returns:
list of coordinates rounded to the closest 0.05 value
"""
rounded_points = []
for coord in point:
base = math.floor(coord * 10)
fractional_part = (coord * 10 - base)
aliquot0_25 = math.floor(fractional_part / 0.25)
if aliquot0_25 == 0.0:
rounded_points.append(base / 10)
if aliquot0_25 in (1.0, 2.0):
rounded_points.append(base / 10 + 0.05)
if aliquot0_25 == 3.0:
rounded_points.append(base / 10 + 0.1)
return rounded_points
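# Illustrative check (hypothetical point, in the same format as the
# docstring example above):
#   round0_25([0.77, 0.00, 1.00]) -> [0.75, 0.0, 1.0]
# i.e. each coordinate snaps to the closest multiple of 0.05.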
user_answer = json.loads(user_input)
if user_answer['lattice'] != correct_answer['lattice']:
return False
points = [map(float, p) for p in user_answer['points']]
if len(points) < 3:
return False
# round points to the closest 0.05 value
points = [round0_25(point) for point in points]
points = [np.array(point) for point in points]
# print miller(points), (correct_answer['miller'].replace(' ', ''),
# negative(correct_answer['miller']).replace(' ', ''))
if miller(points) in (correct_answer['miller'].replace(' ', ''), negative(correct_answer['miller']).replace(' ', '')):
return True
return False
|
jamestwebber/scipy
|
refs/heads/master
|
scipy/special/_precompute/setup.py
|
27
|
from __future__ import division, print_function, absolute_import
def configuration(parent_name='special', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('_precompute', parent_name, top_path)
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration().todict())
|
ZhangXinNan/tensorflow
|
refs/heads/master
|
tensorflow/contrib/autograph/__init__.py
|
4
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Autograph compiles Python code into equivalent TensorFlow code.
Equivalent here means that they have the same effect when executed.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Bring only the relevant symbols to the top level.
from tensorflow.contrib.autograph import operators
from tensorflow.contrib.autograph import utils
from tensorflow.contrib.autograph.core.errors import GraphConstructionError
from tensorflow.contrib.autograph.core.errors import TfRuntimeError
from tensorflow.contrib.autograph.core.errors import improved_errors
from tensorflow.contrib.autograph.impl.api import RunMode
from tensorflow.contrib.autograph.impl.api import convert
from tensorflow.contrib.autograph.impl.api import converted_call
from tensorflow.contrib.autograph.impl.api import do_not_convert
from tensorflow.contrib.autograph.impl.api import to_code
from tensorflow.contrib.autograph.impl.api import to_graph
from tensorflow.contrib.autograph.lang.directives import set_element_type
from tensorflow.contrib.autograph.lang.directives import set_loop_options
from tensorflow.contrib.autograph.lang.special_functions import stack
from tensorflow.contrib.autograph.lang.special_functions import tensor_list
from tensorflow.contrib.autograph.pyct.transformer import AutographParseError
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
# Main API
'RunMode',
'convert',
'converted_call',
'do_not_convert',
'to_code',
'to_graph',
# Overloaded operators
'operators',
# Errors
'improved_errors',
'GraphConstructionError',
'TfRuntimeError',
# Python language "extensions"
'set_element_type',
'set_loop_options',
'stack',
'tensor_list',
# Exceptions
'AutographParseError',
# Utilities: to be removed
'utils',
]
remove_undocumented(__name__, _allowed_symbols)
|
Fl0rianFischer/sme_odoo
|
refs/heads/9.0
|
addons/l10n_at/__init__.py
|
47
|
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) conexus
import account_wizard
|
dahlstrom-g/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyMoveAttributeToInitQuickFixTest/removePass.py
|
83
|
__author__ = 'ktisha'
class A:
def __init__(self):
pass
def foo(self):
self.<caret>b = 1
|
hofmannedv/training-python
|
refs/heads/master
|
usecases/iban/iban.py
|
1
|
#!/usr/bin/python
# -----------------------------------------------------------
# validates an International Bank Account Number (IBAN) using
# a Regular Expression
# demonstrates the usage of the re module
#
# (C) 2017 Frank Hofmann, Berlin, Germany
# Released under GNU Public License (GPL)
# email [email protected]
# -----------------------------------------------------------
import re
def validateIban(iban):
"validates an International Bank Account Number (IBAN) using a Regular Expression"
# define the pattern
pattern = re.compile("^[a-zA-Z]{2}[0-9]{2}[a-zA-Z0-9]{4}[0-9]{7}([a-zA-Z0-9]?){0,16}$")
# validate the pattern
if re.match(pattern, iban):
return True
else:
return False
# define IBAN dataset
listOfIbans = [
"DE85370100500123456503",
"DE56752500000021114251",
"CH1887050500178114RX55",
]
for iban in listOfIbans:
if validateIban(iban):
print("IBAN %s is valid" % iban)
else:
print("IBAN %s is not valid" % iban)
|
ProjectSWGCore/NGECore2
|
refs/heads/master
|
scripts/loot/lootItems/re_junk/comlink.py
|
2
|
def itemTemplate():
return ['object/tangible/loot/npc_loot/shared_comlink_civilian_generic.iff']
def customItemName():
return 'Comlink'
def stackable():
return 1
def junkDealerPrice():
return 15
def junkType():
return 0
|
bright-sparks/chromium-spacewalk
|
refs/heads/master
|
build/android/pylib/utils/json_results_generator.py
|
36
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Most of this file was ported over from Blink's
# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
# Tools/Scripts/webkitpy/common/net/file_uploader.py
#
import json
import logging
import mimetypes
import os
import time
import urllib2
_log = logging.getLogger(__name__)
_JSON_PREFIX = 'ADD_RESULTS('
_JSON_SUFFIX = ');'
def HasJSONWrapper(string):
return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
def StripJSONWrapper(json_content):
# FIXME: Kill this code once the server returns json instead of jsonp.
if HasJSONWrapper(json_content):
return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
return json_content
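# Illustrative checks (hypothetical payloads):
#   StripJSONWrapper('ADD_RESULTS({"tests":{}});') -> '{"tests":{}}'
#   StripJSONWrapper('{"tests":{}}') -> '{"tests":{}}' (no wrapper, unchanged)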
def WriteJSON(json_object, file_path, callback=None):
# Specify separators in order to get compact encoding.
json_string = json.dumps(json_object, separators=(',', ':'))
if callback:
json_string = callback + '(' + json_string + ');'
with open(file_path, 'w') as fp:
fp.write(json_string)
def ConvertTrieToFlatPaths(trie, prefix=None):
"""Flattens the trie of paths, prepending a prefix to each."""
result = {}
for name, data in trie.iteritems():
if prefix:
name = prefix + '/' + name
if len(data) and not 'results' in data:
result.update(ConvertTrieToFlatPaths(data, name))
else:
result[name] = data
return result
def AddPathToTrie(path, value, trie):
"""Inserts a single path and value into a directory trie structure."""
if not '/' in path:
trie[path] = value
return
directory, _slash, rest = path.partition('/')
if not directory in trie:
trie[directory] = {}
AddPathToTrie(rest, value, trie[directory])
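# Illustrative check (hypothetical path):
#   trie = {}
#   AddPathToTrie('foo/bar/baz.html', 7, trie)
#   trie -> {'foo': {'bar': {'baz.html': 7}}}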
def TestTimingsTrie(individual_test_timings):
"""Breaks a test name into dicts by directory
foo/bar/baz.html: 1ms
foo/bar/baz1.html: 3ms
becomes
foo: {
bar: {
baz.html: 1,
baz1.html: 3
}
}
"""
trie = {}
for test_result in individual_test_timings:
test = test_result.test_name
AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
return trie
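# Usage sketch (hypothetical timing; TestResult is defined just below):
#   results = [TestResult('foo/bar.html', elapsed_time=0.1)]
#   TestTimingsTrie(results) -> {'foo': {'bar.html': 100}}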
class TestResult(object):
"""A simple class that represents a single test result."""
# Test modifier constants.
(NONE, FAILS, FLAKY, DISABLED) = range(4)
def __init__(self, test, failed=False, elapsed_time=0):
self.test_name = test
self.failed = failed
self.test_run_time = elapsed_time
test_name = test
try:
test_name = test.split('.')[1]
except IndexError:
_log.warn('Invalid test name: %s.', test)
if test_name.startswith('FAILS_'):
self.modifier = self.FAILS
elif test_name.startswith('FLAKY_'):
self.modifier = self.FLAKY
elif test_name.startswith('DISABLED_'):
self.modifier = self.DISABLED
else:
self.modifier = self.NONE
def Fixable(self):
return self.failed or self.modifier == self.DISABLED
class JSONResultsGeneratorBase(object):
"""A JSON results generator for generic tests."""
MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
# Min time (seconds) that will be added to the JSON.
MIN_TIME = 1
# Note that in non-chromium tests those chars are used to indicate
# test modifiers (FAILS, FLAKY, etc) but not actual test results.
PASS_RESULT = 'P'
SKIP_RESULT = 'X'
FAIL_RESULT = 'F'
FLAKY_RESULT = 'L'
NO_DATA_RESULT = 'N'
MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
TestResult.DISABLED: SKIP_RESULT,
TestResult.FAILS: FAIL_RESULT,
TestResult.FLAKY: FLAKY_RESULT}
VERSION = 4
VERSION_KEY = 'version'
RESULTS = 'results'
TIMES = 'times'
BUILD_NUMBERS = 'buildNumbers'
TIME = 'secondsSinceEpoch'
TESTS = 'tests'
FIXABLE_COUNT = 'fixableCount'
FIXABLE = 'fixableCounts'
ALL_FIXABLE_COUNT = 'allFixableCount'
RESULTS_FILENAME = 'results.json'
TIMES_MS_FILENAME = 'times_ms.json'
INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
# line too long pylint: disable=C0301
URL_FOR_TEST_LIST_JSON = (
'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
# pylint: enable=C0301
def __init__(self, builder_name, build_name, build_number,
results_file_base_path, builder_base_url,
test_results_map, svn_repositories=None,
test_results_server=None,
test_type='',
master_name=''):
"""Modifies the results.json file. Grabs it off the archive directory
if it is not found locally.
Args:
builder_name: the builder name (e.g. Webkit).
build_name: the build name (e.g. webkit-rel).
build_number: the build number.
results_file_base_path: Absolute path to the directory containing the
results json file.
builder_base_url: the URL where we have the archived test results.
If this is None no archived results will be retrieved.
test_results_map: A dictionary that maps test_name to TestResult.
svn_repositories: A (json_field_name, svn_path) pair for SVN
repositories that tests rely on. The SVN revision will be
included in the JSON with the given json_field_name.
test_results_server: server that hosts test results json.
test_type: test type string (e.g. 'layout-tests').
master_name: the name of the buildbot master.
"""
self._builder_name = builder_name
self._build_name = build_name
self._build_number = build_number
self._builder_base_url = builder_base_url
self._results_directory = results_file_base_path
self._test_results_map = test_results_map
self._test_results = test_results_map.values()
self._svn_repositories = svn_repositories
if not self._svn_repositories:
self._svn_repositories = {}
self._test_results_server = test_results_server
self._test_type = test_type
self._master_name = master_name
self._archived_results = None
def GenerateJSONOutput(self):
json_object = self.GetJSON()
if json_object:
file_path = (
os.path.join(
self._results_directory,
self.INCREMENTAL_RESULTS_FILENAME))
WriteJSON(json_object, file_path)
def GenerateTimesMSFile(self):
times = TestTimingsTrie(self._test_results_map.values())
file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
WriteJSON(times, file_path)
def GetJSON(self):
"""Gets the results for the results.json file."""
results_json = {}
if not results_json:
results_json, error = self._GetArchivedJSONResults()
if error:
# If there was an error don't write a results.json
# file at all as it would lose all the information on the
# bot.
_log.error('Archive directory is inaccessible. Not '
'modifying or clobbering the results.json '
'file: ' + str(error))
return None
builder_name = self._builder_name
if results_json and builder_name not in results_json:
_log.debug('Builder name (%s) is not in the results.json file.'
% builder_name)
self._ConvertJSONToCurrentVersion(results_json)
if builder_name not in results_json:
results_json[builder_name] = (
self._CreateResultsForBuilderJSON())
results_for_builder = results_json[builder_name]
if builder_name:
self._InsertGenericMetaData(results_for_builder)
self._InsertFailureSummaries(results_for_builder)
# Update the all failing tests with result type and time.
tests = results_for_builder[self.TESTS]
all_failing_tests = self._GetFailedTestNames()
all_failing_tests.update(ConvertTrieToFlatPaths(tests))
for test in all_failing_tests:
self._InsertTestTimeAndResult(test, tests)
return results_json
def SetArchivedResults(self, archived_results):
self._archived_results = archived_results
def UploadJSONFiles(self, json_files):
"""Uploads the given json_files to the test_results_server (if the
test_results_server is given)."""
if not self._test_results_server:
return
if not self._master_name:
_log.error(
'--test-results-server was set, but --master-name was not. Not '
'uploading JSON files.')
return
_log.info('Uploading JSON files for builder: %s', self._builder_name)
attrs = [('builder', self._builder_name),
('testtype', self._test_type),
('master', self._master_name)]
files = [(json_file, os.path.join(self._results_directory, json_file))
for json_file in json_files]
url = 'http://%s/testfile/upload' % self._test_results_server
# Set uploading timeout in case appengine server is having problems.
# 120 seconds are more than enough to upload test results.
uploader = _FileUploader(url, 120)
try:
response = uploader.UploadAsMultipartFormData(files, attrs)
if response:
if response.code == 200:
_log.info('JSON uploaded.')
else:
_log.debug(
"JSON upload failed, %d: '%s'" %
(response.code, response.read()))
else:
_log.error('JSON upload failed; no response returned')
except Exception, err:
_log.error('Upload failed: %s' % err)
return
def _GetTestTiming(self, test_name):
"""Returns test timing data (elapsed time) in second
for the given test_name."""
if test_name in self._test_results_map:
# Floor for now to get time in seconds.
return int(self._test_results_map[test_name].test_run_time)
return 0
def _GetFailedTestNames(self):
"""Returns a set of failed test names."""
return set([r.test_name for r in self._test_results if r.failed])
def _GetModifierChar(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
return self.MODIFIER_TO_CHAR[test_result.modifier]
return self.__class__.PASS_RESULT
def _get_result_char(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier == TestResult.DISABLED:
return self.__class__.SKIP_RESULT
if test_result.failed:
return self.__class__.FAIL_RESULT
return self.__class__.PASS_RESULT
def _GetSVNRevision(self, in_directory):
"""Returns the svn revision for the given directory.
Args:
in_directory: The directory where svn is to be run.
"""
# This is overridden in flakiness_dashboard_results_uploader.py.
raise NotImplementedError()
def _GetArchivedJSONResults(self):
"""Download JSON file that only contains test
name list from test-results server. This is for generating incremental
JSON so the file generated has info for tests that failed before but
pass or are skipped from current run.
Returns (archived_results, error) tuple where error is None if results
were successfully read.
"""
results_json = {}
old_results = None
error = None
if not self._test_results_server:
return {}, None
results_file_url = (self.URL_FOR_TEST_LIST_JSON %
(urllib2.quote(self._test_results_server),
urllib2.quote(self._builder_name),
self.RESULTS_FILENAME,
urllib2.quote(self._test_type),
urllib2.quote(self._master_name)))
try:
# FIXME: We should talk to the network via a Host object.
results_file = urllib2.urlopen(results_file_url)
old_results = results_file.read()
except urllib2.HTTPError, http_error:
# A non-4xx status code means the bot is hosed for some reason
# and we can't grab the results.json file off of it.
if (http_error.code < 400 or http_error.code >= 500):
error = http_error
except urllib2.URLError, url_error:
error = url_error
if old_results:
# Strip the prefix and suffix so we can get the actual JSON object.
old_results = StripJSONWrapper(old_results)
try:
results_json = json.loads(old_results)
except Exception:
_log.debug('results.json was not valid JSON. Clobbering.')
# The JSON file is not valid JSON. Just clobber the results.
results_json = {}
else:
_log.debug('Old JSON results do not exist. Starting fresh.')
results_json = {}
return results_json, error
def _InsertFailureSummaries(self, results_for_builder):
"""Inserts aggregate pass/failure statistics into the JSON.
This method reads self._test_results and generates
FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
"""
# Insert the number of tests that failed or skipped.
fixable_count = len([r for r in self._test_results if r.Fixable()])
self._InsertItemIntoRawList(results_for_builder,
fixable_count, self.FIXABLE_COUNT)
# Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
entry = {}
for test_name in self._test_results_map.iterkeys():
result_char = self._GetModifierChar(test_name)
entry[result_char] = entry.get(result_char, 0) + 1
# Insert the pass/skip/failure summary dictionary.
self._InsertItemIntoRawList(results_for_builder, entry,
self.FIXABLE)
# Insert the number of all the tests that are supposed to pass.
all_test_count = len(self._test_results)
self._InsertItemIntoRawList(results_for_builder,
all_test_count, self.ALL_FIXABLE_COUNT)
def _InsertItemIntoRawList(self, results_for_builder, item, key):
"""Inserts the item into the list with the given key in the results for
this builder. Creates the list if no such list exists.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
item: Number or string to insert into the list.
key: Key in results_for_builder for the list to insert into.
"""
if key in results_for_builder:
raw_list = results_for_builder[key]
else:
raw_list = []
raw_list.insert(0, item)
raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
results_for_builder[key] = raw_list
def _InsertItemRunLengthEncoded(self, item, encoded_results):
"""Inserts the item into the run-length encoded results.
Args:
item: String or number to insert.
encoded_results: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
if len(encoded_results) and item == encoded_results[0][1]:
num_results = encoded_results[0][0]
if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
encoded_results[0][0] = num_results + 1
else:
# Use a list instead of a class for the run-length encoding since
# we want the serialized form to be concise.
encoded_results.insert(0, [1, item])
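# Illustrative check (hypothetical history; the newest run comes first):
#   encoded = [[3, 'P'], [1, 'F']]    # encodes PPPF
#   self._InsertItemRunLengthEncoded('P', encoded)
#   encoded -> [[4, 'P'], [1, 'F']]   # encodes PPPPF
#   self._InsertItemRunLengthEncoded('F', encoded)
#   encoded -> [[1, 'F'], [4, 'P'], [1, 'F']]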
def _InsertGenericMetaData(self, results_for_builder):
""" Inserts generic metadata (such as version number, current time etc)
into the JSON.
Args:
results_for_builder: Dictionary containing the test results for
a single builder.
"""
self._InsertItemIntoRawList(results_for_builder,
self._build_number, self.BUILD_NUMBERS)
# Include SVN revisions for the given repositories.
for (name, path) in self._svn_repositories:
# Note: for JSON file's backward-compatibility we use 'chrome' rather
# than 'chromium' here.
lowercase_name = name.lower()
if lowercase_name == 'chromium':
lowercase_name = 'chrome'
self._InsertItemIntoRawList(results_for_builder,
self._GetSVNRevision(path),
lowercase_name + 'Revision')
self._InsertItemIntoRawList(results_for_builder,
int(time.time()),
self.TIME)
def _InsertTestTimeAndResult(self, test_name, tests):
""" Insert a test item with its results to the given tests dictionary.
Args:
tests: Dictionary containing test result entries.
"""
result = self._get_result_char(test_name)
test_time = self._GetTestTiming(test_name)
this_test = tests
for segment in test_name.split('/'):
if segment not in this_test:
this_test[segment] = {}
this_test = this_test[segment]
if not len(this_test):
self._PopulateResultsAndTimesJSON(this_test)
if self.RESULTS in this_test:
self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
else:
this_test[self.RESULTS] = [[1, result]]
if self.TIMES in this_test:
self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
else:
this_test[self.TIMES] = [[1, test_time]]
def _ConvertJSONToCurrentVersion(self, results_json):
"""If the JSON does not match the current version, converts it to the
current version and adds in the new version number.
"""
if self.VERSION_KEY in results_json:
archive_version = results_json[self.VERSION_KEY]
if archive_version == self.VERSION:
return
else:
archive_version = 3
# version 3->4
if archive_version == 3:
for results in results_json.values():
self._ConvertTestsToTrie(results)
results_json[self.VERSION_KEY] = self.VERSION
def _ConvertTestsToTrie(self, results):
if not self.TESTS in results:
return
test_results = results[self.TESTS]
test_results_trie = {}
for test in test_results.iterkeys():
single_test_result = test_results[test]
AddPathToTrie(test, single_test_result, test_results_trie)
results[self.TESTS] = test_results_trie
def _PopulateResultsAndTimesJSON(self, results_and_times):
results_and_times[self.RESULTS] = []
results_and_times[self.TIMES] = []
return results_and_times
def _CreateResultsForBuilderJSON(self):
results_for_builder = {}
results_for_builder[self.TESTS] = {}
return results_for_builder
def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
"""Removes items from the run-length encoded list after the final
item that exceeds the max number of builds to track.
Args:
encoded_list: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
num_builds = 0
index = 0
for result in encoded_list:
num_builds = num_builds + result[0]
index = index + 1
if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
return encoded_list[:index]
return encoded_list
def _NormalizeResultsJSON(self, test, test_name, tests):
""" Prune tests where all runs pass or tests that no longer exist and
truncate all results to maxNumberOfBuilds.
Args:
test: ResultsAndTimes object for this test.
test_name: Name of the test.
tests: The JSON object with all the test results for this builder.
"""
test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.RESULTS])
test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.TIMES])
is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
self.PASS_RESULT)
is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
self.NO_DATA_RESULT)
max_time = max([test_time[1] for test_time in test[self.TIMES]])
# Remove all passes/no-data from the results to reduce noise and
# filesize. If a test passes every run, but takes > MIN_TIME to run,
# don't throw away the data.
if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
del tests[test_name]
# method could be a function pylint: disable=R0201
def _IsResultsAllOfType(self, results, result_type):
"""Returns whether all the results are of the given type
(e.g. all passes)."""
return len(results) == 1 and results[0][1] == result_type
class _FileUploader(object):
def __init__(self, url, timeout_seconds):
self._url = url
self._timeout_seconds = timeout_seconds
def UploadAsMultipartFormData(self, files, attrs):
file_objs = []
for filename, path in files:
with file(path, 'rb') as fp:
file_objs.append(('file', filename, fp.read()))
# FIXME: We should use the same variable names for the formal and actual
# parameters.
content_type, data = _EncodeMultipartFormData(attrs, file_objs)
return self._UploadData(content_type, data)
def _UploadData(self, content_type, data):
start = time.time()
end = start + self._timeout_seconds
while time.time() < end:
try:
request = urllib2.Request(self._url, data,
{'Content-Type': content_type})
return urllib2.urlopen(request)
except urllib2.HTTPError as e:
_log.warn("Received HTTP status %s loading \"%s\". "
'Retrying in 10 seconds...' % (e.code, e.filename))
time.sleep(10)
def _GetMIMEType(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
# FIXME: Rather than taking tuples, this function should take more
# structured data.
def _EncodeMultipartFormData(fields, files):
"""Encode form fields for multipart/form-data.
Args:
fields: A sequence of (name, value) elements for regular form fields.
files: A sequence of (name, filename, value) elements for data to be
uploaded as files.
Returns:
(content_type, body) ready for httplib.HTTP instance.
Source:
http://code.google.com/p/rietveld/source/browse/trunk/upload.py
"""
BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
CRLF = '\r\n'
lines = []
for key, value in fields:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"' % key)
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
for key, filename, value in files:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"; '
'filename="%s"' % (key, filename))
lines.append('Content-Type: %s' % _GetMIMEType(filename))
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
lines.append('--' + BOUNDARY + '--')
lines.append('')
body = CRLF.join(lines)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
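# Usage sketch (hypothetical form data):
#   content_type, body = _EncodeMultipartFormData(
#       [('builder', 'Webkit')],
#       [('file', 'results.json', '{"tests":{}}')])
# content_type is 'multipart/form-data; boundary=...' with the fixed
# BOUNDARY above, and body is the CRLF-joined multipart payload.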
|
bensondaled/pyfluo
|
refs/heads/v2
|
pyfluo/util/progress/__init__.py
|
1
|
from .progress import Progress, Elapsed
from .progressbar import *
|