repo_name | ref | path | copies | content
---|---|---|---|---|
vishnu2kmohan/dcos | refs/heads/master | pkgpanda/build/cli.py | 6 | """Panda package builder
Reads a buildinfo file, uses it to assemble the sources and determine the
package version, then builds the package in an isolated environment along with
the necessary dependencies.
Usage:
mkpanda [--repository-url=<repository_url>] [--dont-clean-after-build] [--recursive] [--variant=<variant>]
mkpanda tree [--mkbootstrap] [--repository-url=<repository_url>] [--variant=<variant>]
"""
import sys
from os import getcwd, umask
from os.path import basename, normpath
from docopt import docopt
import pkgpanda.build
import pkgpanda.build.constants
def main():
try:
arguments = docopt(__doc__, version="mkpanda {}".format(pkgpanda.build.constants.version))
umask(0o022)
variant_arg = arguments['--variant']
# Map the keyword 'default' to None, since that is how the default variant is represented
# internally; leaving --variant unset (None) triggers a build of all variants.
target_variant = variant_arg if variant_arg != 'default' else None
# Make a local repository for build dependencies
if arguments['tree']:
package_store = pkgpanda.build.PackageStore(getcwd(), arguments['--repository-url'])
if variant_arg is None:
pkgpanda.build.build_tree_variants(package_store, arguments['--mkbootstrap'])
else:
pkgpanda.build.build_tree(package_store, arguments['--mkbootstrap'], target_variant)
sys.exit(0)
# Package name is the folder name.
name = basename(getcwd())
# Package store is always the parent directory
package_store = pkgpanda.build.PackageStore(normpath(getcwd() + '/../'), arguments['--repository-url'])
# Check that the folder is a package folder (the name was found by the package store as a
# valid package with 1+ variants).
if name not in package_store.packages_by_name:
print("Not a valid package folder. Didn't find any 'buildinfo.json' files.", file=sys.stderr)
sys.exit(1)
clean_after_build = not arguments['--dont-clean-after-build']
recursive = arguments['--recursive']
if variant_arg is None:
# No command -> build all package variants.
pkg_dict = pkgpanda.build.build_package_variants(
package_store,
name,
clean_after_build,
recursive)
else:
# variant given, only build that one package variant
pkg_dict = {
target_variant: pkgpanda.build.build(
package_store,
name,
target_variant,
clean_after_build,
recursive)
}
print("Package variants available as:")
for k, v in pkg_dict.items():
if k is None:
k = "<default>"
print(k + ':' + v)
sys.exit(0)
except pkgpanda.build.BuildError as ex:
print("ERROR: {}".format(ex))
sys.exit(1)
if __name__ == "__main__":
main()
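# Illustrative sketch, separate from the module above: how docopt turns a
# Usage string like this module's docstring into an argument dict. The argv
# values shown here are hypothetical.
from docopt import docopt
_demo_doc = """Usage:
mkpanda [--repository-url=<url>] [--variant=<variant>]
mkpanda tree [--mkbootstrap] [--variant=<variant>]
"""
_demo_args = docopt(_demo_doc, argv=['tree', '--variant=default'])
assert _demo_args['tree'] is True
assert _demo_args['--variant'] == 'default'
assert _demo_args['--repository-url'] is None  # option absent from this pattern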
|
meabsence/python-for-android | refs/heads/master | python-build/python-libs/gdata/build/lib/gdata/tlslite/utils/Python_AES.py | 359 | """Pure-Python AES implementation."""
from cryptomath import *
from AES import *
from rijndael import rijndael
def new(key, mode, IV):
return Python_AES(key, mode, IV)
class Python_AES(AES):
def __init__(self, key, mode, IV):
AES.__init__(self, key, mode, IV, "python")
self.rijndael = rijndael(key, 16)
self.IV = IV
def encrypt(self, plaintext):
AES.encrypt(self, plaintext)
plaintextBytes = stringToBytes(plaintext)
chainBytes = stringToBytes(self.IV)
#CBC Mode: For each block...
for x in range(len(plaintextBytes) // 16):
#XOR with the chaining block
blockBytes = plaintextBytes[x*16 : (x*16)+16]
for y in range(16):
blockBytes[y] ^= chainBytes[y]
blockString = bytesToString(blockBytes)
#Encrypt it
encryptedBytes = stringToBytes(self.rijndael.encrypt(blockString))
#Overwrite the input with the output
for y in range(16):
plaintextBytes[(x*16)+y] = encryptedBytes[y]
#Set the next chaining block
chainBytes = encryptedBytes
self.IV = bytesToString(chainBytes)
return bytesToString(plaintextBytes)
def decrypt(self, ciphertext):
AES.decrypt(self, ciphertext)
ciphertextBytes = stringToBytes(ciphertext)
chainBytes = stringToBytes(self.IV)
#CBC Mode: For each block...
for x in range(len(ciphertextBytes) // 16):
#Decrypt it
blockBytes = ciphertextBytes[x*16 : (x*16)+16]
blockString = bytesToString(blockBytes)
decryptedBytes = stringToBytes(self.rijndael.decrypt(blockString))
#XOR with the chaining block and overwrite the input with output
for y in range(16):
decryptedBytes[y] ^= chainBytes[y]
ciphertextBytes[(x*16)+y] = decryptedBytes[y]
#Set the next chaining block
chainBytes = blockBytes
self.IV = bytesToString(chainBytes)
return bytesToString(ciphertextBytes)
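# A minimal Python 3 sketch of the CBC chaining implemented above, using a toy
# self-inverse XOR "block cipher" in place of Rijndael so the example stays
# self-contained and runnable; it is an illustration, not real encryption.
import os
_BLOCK = 16
_KEY = bytes(range(_BLOCK))
def _toy_block(block):
    # Stand-in for rijndael.encrypt/decrypt: XOR is its own inverse.
    return bytes(b ^ k for b, k in zip(block, _KEY))
def cbc_encrypt(plaintext, iv):
    chain, out = iv, b''
    for i in range(0, len(plaintext), _BLOCK):
        # XOR the plaintext block with the chaining block, then "encrypt" it
        mixed = bytes(p ^ c for p, c in zip(plaintext[i:i + _BLOCK], chain))
        chain = _toy_block(mixed)
        out += chain
    return out
def cbc_decrypt(ciphertext, iv):
    chain, out = iv, b''
    for i in range(0, len(ciphertext), _BLOCK):
        block = ciphertext[i:i + _BLOCK]
        # "Decrypt" the block, then XOR with the previous ciphertext block
        out += bytes(d ^ c for d, c in zip(_toy_block(block), chain))
        chain = block
    return out
_iv = os.urandom(_BLOCK)
_msg = b'sixteen byte msg' * 2
assert cbc_decrypt(cbc_encrypt(_msg, _iv), _iv) == _msg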
|
DelazJ/QGIS | refs/heads/master | python/plugins/processing/gui/RangePanel.py | 30 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RangePanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
import warnings
from qgis.PyQt import uic
from qgis.PyQt.QtCore import pyqtSignal
from qgis.PyQt.QtWidgets import QDialog
from qgis.core import QgsProcessingParameterNumber
pluginPath = os.path.split(os.path.dirname(__file__))[0]
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'widgetRangeSelector.ui'))
class RangePanel(BASE, WIDGET):
hasChanged = pyqtSignal()
def __init__(self, param):
super(RangePanel, self).__init__(None)
self.setupUi(self)
self.param = param
# Integer or Double range
if self.param.dataType() == QgsProcessingParameterNumber.Integer:
self.spnMin.setDecimals(0)
self.spnMax.setDecimals(0)
if param.defaultValue() is not None:
self.setValue(param.defaultValue())
values = self.getValues()
# Spin range logic
self.spnMin.valueChanged.connect(lambda: self.setMinMax())
self.spnMax.valueChanged.connect(lambda: self.setMaxMin())
def setMinMax(self):
values = self.getValues()
if values[0] >= values[1]:
self.spnMax.setValue(values[0])
self.hasChanged.emit()
def setMaxMin(self):
values = self.getValues()
if values[0] >= values[1]:
self.spnMin.setValue(values[1])
self.hasChanged.emit()
def getValue(self):
return '{},{}'.format(self.spnMin.value(), self.spnMax.value())
def getValues(self):
value = self.getValue()
if value:
return [float(a) for a in value.split(',')]
def setValue(self, value):
try:
values = value.split(',')
minVal = float(values[0])
maxVal = float(values[1])
except (AttributeError, IndexError, ValueError):
return
self.spnMin.setValue(minVal)
self.spnMax.setValue(maxVal)
|
Meriipu/quodlibet | refs/heads/master | quodlibet/formats/_image.py | 2 | # Copyright 2013 Christoph Reiter
# 2020 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from ._misc import AudioFileError
class ImageContainer:
"""Mixin/Interface for AudioFile to support basic embedded image editing"""
def get_primary_image(self):
"""Returns the primary embedded image or None.
In case of an error returns None.
"""
return
def get_images(self):
"""Returns a list of embedded images, primary first.
In case of an error returns an empty list.
"""
# fall back to the single implementation
image = self.get_primary_image()
if image:
return [image]
return []
@property
def has_images(self):
"""Fast way to check for images, might be False if the file
was modified externally.
"""
return "~picture" in self
@has_images.setter
def has_images(self, value):
if value:
self["~picture"] = "y"
else:
self.pop("~picture", None)
@property
def can_change_images(self):
"""Return True IFF `clear_images()` and `set_images()` are
implemented"""
return False
def clear_images(self):
"""Delete all embedded images.
Raises:
AudioFileError
"""
raise AudioFileError("Not supported for this format")
def set_image(self, image):
"""Replaces all embedded images by the passed image.
The image type recorded in the file will be APICType.COVER_FRONT,
disregarding image.type.
Raises:
AudioFileError
"""
raise AudioFileError("Not supported for this format")
class APICType:
"""Enumeration of image types defined by the ID3 standard but also reused
in WMA/FLAC/VorbisComment
"""
# Other
OTHER = 0
# 32x32 pixels 'file icon' (PNG only)
FILE_ICON = 1
# Other file icon
OTHER_FILE_ICON = 2
# Cover (front)
COVER_FRONT = 3
# Cover (back)
COVER_BACK = 4
# Leaflet page
LEAFLET_PAGE = 5
# Media (e.g. label side of CD)
MEDIA = 6
# Lead artist/lead performer/soloist
LEAD_ARTIST = 7
# Artist/performer
ARTIST = 8
# Conductor
CONDUCTOR = 9
# Band/Orchestra
BAND = 10
# Composer
COMPOSER = 11
# Lyricist/text writer
LYRISCIST = 12
# Recording Location
RECORDING_LOCATION = 13
# During recording
DURING_RECORDING = 14
# During performance
DURING_PERFORMANCE = 15
# Movie/video screen capture
SCREEN_CAPTURE = 16
# A bright coloured fish
FISH = 17
# Illustration
ILLUSTRATION = 18
# Band/artist logotype
BAND_LOGOTYPE = 19
# Publisher/Studio logotype
PUBLISHER_LOGOTYPE = 20
@classmethod
def to_string(cls, value):
for k, v in cls.__dict__.items():
if v == value:
return k
return ""
@classmethod
def is_valid(cls, value):
return cls.OTHER <= value <= cls.PUBLISHER_LOGOTYPE
@classmethod
def sort_key(cls, value):
"""Sorts picture types, most important picture is the lowest.
Important is defined as most representative of an album release, ymmv.
"""
# index value -> important
important = [
cls.LEAFLET_PAGE, cls.MEDIA,
cls.COVER_BACK, cls.COVER_FRONT
]
try:
return -important.index(value)
except ValueError:
if value < cls.COVER_FRONT:
return 100 - value
else:
return value
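# Usage sketch for sort_key above: lower keys sort first, so min() over a
# mix of embedded-image types picks the front cover as most representative.
_demo_types = [APICType.MEDIA, APICType.ARTIST,
APICType.COVER_FRONT, APICType.FILE_ICON]
assert min(_demo_types, key=APICType.sort_key) == APICType.COVER_FRONT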
class EmbeddedImage:
"""Embedded image, contains most of the properties needed
for FLAC and ID3 images.
"""
def __init__(self, fileobj, mime_type, width=-1, height=-1, color_depth=-1,
type_=APICType.OTHER):
self.mime_type = mime_type
self.width = width
self.height = height
self.color_depth = color_depth
self.file = fileobj
self.type = type_
def __repr__(self):
return "<%s mime_type=%r width=%d height=%d type=%s file=%r>" % (
type(self).__name__, self.mime_type, self.width, self.height,
APICType.to_string(self.type), self.file)
def read(self):
"""Read the raw image data
Returns:
bytes
Raises:
IOError
"""
self.file.seek(0)
data = self.file.read()
self.file.seek(0)
return data
@property
def sort_key(self):
return APICType.sort_key(self.type)
@property
def extensions(self):
"""A possibly empty list of extensions e.g. ["jpeg", jpg"]"""
from gi.repository import GdkPixbuf
for format_ in GdkPixbuf.Pixbuf.get_formats():
if self.mime_type in format_.get_mime_types():
return format_.get_extensions()
return []
@classmethod
def from_path(cls, path):
"""Reads the header of `path` and creates a new image instance
or None.
"""
from gi.repository import GdkPixbuf, GLib
pb = []
# Feed data to PixbufLoader until it emits area-prepared,
# get the partially filled pixbuf and extract the needed
# information.
def area_prepared(loader):
pb.append(loader.get_pixbuf())
loader = GdkPixbuf.PixbufLoader()
loader.connect("area-prepared", area_prepared)
try:
with open(path, "rb") as h:
while not pb:
data = h.read(1024)
if data:
loader.write(data)
else:
break
except (EnvironmentError, GLib.GError):
return
finally:
try:
loader.close()
except GLib.GError:
pass
if not pb:
return
pb = pb[0]
width = pb.get_width()
height = pb.get_height()
color_depth = pb.get_bits_per_sample()
format_ = loader.get_format()
mime_types = format_.get_mime_types()
mime_type = mime_types and mime_types[0] or ""
try:
return cls(open(path, "rb"), mime_type, width, height, color_depth)
except EnvironmentError:
return
|
belmiromoreira/nova | refs/heads/master | nova/tests/unit/virt/xenapi/image/test_vdi_through_dev.py | 70 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import tarfile
import eventlet
from nova.image import glance
from nova import test
from nova.virt.xenapi.client import session as xenapi_session
from nova.virt.xenapi.image import vdi_through_dev
@contextlib.contextmanager
def fake_context(result=None):
yield result
class TestDelegatingToCommand(test.NoDBTestCase):
def test_upload_image_is_delegated_to_command(self):
command = self.mox.CreateMock(vdi_through_dev.UploadToGlanceAsRawTgz)
self.mox.StubOutWithMock(vdi_through_dev, 'UploadToGlanceAsRawTgz')
vdi_through_dev.UploadToGlanceAsRawTgz(
'ctx', 'session', 'instance', 'image_id', 'vdis').AndReturn(
command)
command.upload_image().AndReturn('result')
self.mox.ReplayAll()
store = vdi_through_dev.VdiThroughDevStore()
result = store.upload_image(
'ctx', 'session', 'instance', 'image_id', 'vdis')
self.assertEqual('result', result)
class TestUploadToGlanceAsRawTgz(test.NoDBTestCase):
def test_upload_image(self):
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_perform_upload')
self.mox.StubOutWithMock(store, '_get_vdi_ref')
self.mox.StubOutWithMock(vdi_through_dev, 'glance')
self.mox.StubOutWithMock(vdi_through_dev, 'vm_utils')
self.mox.StubOutWithMock(vdi_through_dev, 'utils')
store._get_vdi_ref().AndReturn('vdi_ref')
vdi_through_dev.vm_utils.vdi_attached_here(
'session', 'vdi_ref', read_only=True).AndReturn(
fake_context('dev'))
vdi_through_dev.utils.make_dev_path('dev').AndReturn('devpath')
vdi_through_dev.utils.temporary_chown('devpath').AndReturn(
fake_context())
store._perform_upload('devpath')
self.mox.ReplayAll()
store.upload_image()
def test__perform_upload(self):
producer = self.mox.CreateMock(vdi_through_dev.TarGzProducer)
consumer = self.mox.CreateMock(glance.UpdateGlanceImage)
pool = self.mox.CreateMock(eventlet.GreenPool)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_create_pipe')
self.mox.StubOutWithMock(store, '_get_virtual_size')
self.mox.StubOutWithMock(producer, 'get_metadata')
self.mox.StubOutWithMock(vdi_through_dev, 'TarGzProducer')
self.mox.StubOutWithMock(glance, 'UpdateGlanceImage')
self.mox.StubOutWithMock(vdi_through_dev, 'eventlet')
producer.get_metadata().AndReturn('metadata')
store._get_virtual_size().AndReturn('324')
store._create_pipe().AndReturn(('readfile', 'writefile'))
vdi_through_dev.TarGzProducer(
'devpath', 'writefile', '324', 'disk.raw').AndReturn(
producer)
glance.UpdateGlanceImage('context', 'id', 'metadata',
'readfile').AndReturn(consumer)
vdi_through_dev.eventlet.GreenPool().AndReturn(pool)
pool.spawn(producer.start)
pool.spawn(consumer.start)
pool.waitall()
self.mox.ReplayAll()
store._perform_upload('devpath')
def test__get_vdi_ref(self):
session = self.mox.CreateMock(xenapi_session.XenAPISession)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', session, 'instance', 'id', ['vdi0', 'vdi1'])
session.call_xenapi('VDI.get_by_uuid', 'vdi0').AndReturn('vdi_ref')
self.mox.ReplayAll()
self.assertEqual('vdi_ref', store._get_vdi_ref())
def test__get_virtual_size(self):
session = self.mox.CreateMock(xenapi_session.XenAPISession)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', session, 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_get_vdi_ref')
store._get_vdi_ref().AndReturn('vdi_ref')
session.call_xenapi('VDI.get_virtual_size', 'vdi_ref')
self.mox.ReplayAll()
store._get_virtual_size()
def test__create_pipe(self):
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(vdi_through_dev, 'os')
self.mox.StubOutWithMock(vdi_through_dev, 'greenio')
vdi_through_dev.os.pipe().AndReturn(('rpipe', 'wpipe'))
vdi_through_dev.greenio.GreenPipe('rpipe', 'rb', 0).AndReturn('rfile')
vdi_through_dev.greenio.GreenPipe('wpipe', 'wb', 0).AndReturn('wfile')
self.mox.ReplayAll()
result = store._create_pipe()
self.assertEqual(('rfile', 'wfile'), result)
class TestTarGzProducer(test.NoDBTestCase):
def test_constructor(self):
producer = vdi_through_dev.TarGzProducer('devpath', 'writefile',
'100', 'fname')
self.assertEqual('devpath', producer.fpath)
self.assertEqual('writefile', producer.output)
self.assertEqual('100', producer.size)
self.assertEqual('writefile', producer.output)
def test_start(self):
outf = self.mox.CreateMock(file)
producer = vdi_through_dev.TarGzProducer('fpath', outf,
'100', 'fname')
tfile = self.mox.CreateMock(tarfile.TarFile)
tinfo = self.mox.CreateMock(tarfile.TarInfo)
inf = self.mox.CreateMock(file)
self.mox.StubOutWithMock(vdi_through_dev, 'tarfile')
self.mox.StubOutWithMock(producer, '_open_file')
vdi_through_dev.tarfile.TarInfo(name='fname').AndReturn(tinfo)
vdi_through_dev.tarfile.open(fileobj=outf, mode='w|gz').AndReturn(
fake_context(tfile))
producer._open_file('fpath', 'rb').AndReturn(fake_context(inf))
tfile.addfile(tinfo, fileobj=inf)
outf.close()
self.mox.ReplayAll()
producer.start()
self.assertEqual(100, tinfo.size)
def test_get_metadata(self):
producer = vdi_through_dev.TarGzProducer('devpath', 'writefile',
'100', 'fname')
self.assertEqual({
'disk_format': 'raw',
'container_format': 'tgz'},
producer.get_metadata())
|
CampyDB/campy-server | refs/heads/master | app/api/resources/user.py | 1 | from datetime import datetime
from flask import g
from flask.ext.restful import Resource, reqparse
from app import session, logger
from app.api import authorized
from app.models import User
__author__ = 'peter'
class UserAPI(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('password', type=str, location='json')
self.reqparse.add_argument('email', type=str, location='json')
self.reqparse.add_argument('new_username', type=str, location='json')
super(UserAPI, self).__init__()
@staticmethod
def user_info(user):
assert isinstance(user, User)
return {'name': user.name,
'created_on': user.created_on.isoformat(),
'last_seen': user.last_seen.isoformat(),
'samples': [x.name for x in user.samples.all()],
'role': user.role.value,
'email': user.email}
@authorized(public_auth_required=False)
def get(self, user_name):
user = session.query(User).filter(User.name == user_name).first()
if user is None:
logger.error('GET User "{}" not found in DB'.format(user_name))
return {'error': 'Specified user does not exist'}, 404
if user != g.user:
logger.error('GET Auth user {} does not match specified user {}'.format(user, g.user))
return {'error': 'Authenticated user does not match specified user'}, 403
assert isinstance(user, User)
logger.info('GET User {}; username {}'.format(user, user_name))
# Update when User last seen
user.last_seen = datetime.utcnow()
session.add(user)
session.commit()
return UserAPI.user_info(user)
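# Hypothetical wiring sketch (the Api instance and URL rule are assumptions,
# not part of this file): Flask-RESTful passes the <user_name> URL parameter
# straight through to UserAPI.get().
from flask import Flask
from flask.ext.restful import Api
_app = Flask(__name__)
_api = Api(_app)
_api.add_resource(UserAPI, '/api/user/<string:user_name>')
# GET /api/user/alice -> UserAPI.get('alice') -> user_info dict, or the
# 403/404 error payloads handled above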
|
samuelclay/NewsBlur | refs/heads/master | vendor/paypalapi/interface.py | 18 | # coding=utf-8
"""
The end developer will do most of their work with the PayPalInterface class found
in this module. Configuration, querying, and manipulation can all be done
with it.
"""
import types
import logging
from pprint import pformat
import warnings
import requests
from vendor.paypalapi.settings import PayPalConfig
from vendor.paypalapi.response import PayPalResponse
from vendor.paypalapi.response_list import PayPalResponseList
from vendor.paypalapi.exceptions import (PayPalError,
PayPalAPIResponseError,
PayPalConfigError)
from vendor.paypalapi.compat import is_py3
if is_py3:
#noinspection PyUnresolvedReferences
from urllib.parse import urlencode
else:
from urllib import urlencode
logger = logging.getLogger('paypal.interface')
class PayPalInterface(object):
__credentials = ['USER', 'PWD', 'SIGNATURE', 'SUBJECT']
"""
The end developers will do 95% of their work through this class. API
queries, configuration, etc, all go through here. See the __init__ method
for config related details.
"""
def __init__(self, config=None, **kwargs):
"""
Constructor, which passes all config directives to the config class
via kwargs. For example:
paypal = PayPalInterface(API_USERNAME='somevalue')
Optionally, you may pass a 'config' kwarg to provide your own
PayPalConfig object.
"""
if config:
# User provided their own PayPalConfig object.
self.config = config
else:
# Take the kwargs and stuff them in a new PayPalConfig object.
self.config = PayPalConfig(**kwargs)
def _encode_utf8(self, **kwargs):
"""
UTF8 encodes all of the NVP values.
"""
if is_py3:
# This is only valid for Python 2. In Python 3, unicode is
# everywhere (yay).
return kwargs
unencoded_pairs = kwargs
for i in unencoded_pairs.keys():
#noinspection PyUnresolvedReferences
if isinstance(unencoded_pairs[i], types.UnicodeType):
unencoded_pairs[i] = unencoded_pairs[i].encode('utf-8')
return unencoded_pairs
def _check_required(self, requires, **kwargs):
"""
Checks kwargs for the values specified in 'requires', which is a tuple
of strings. These strings are the NVP names of the required values.
"""
for req in requires:
# PayPal api is never mixed-case.
if req.lower() not in kwargs and req.upper() not in kwargs:
raise PayPalError('missing required : %s' % req)
def _sanitize_locals(self, data):
"""
Remove the 'self' key in locals()
It's more explicit to do it in one function
"""
if 'self' in data:
data = data.copy()
del data['self']
return data
def _call(self, method, **kwargs):
"""
Wrapper method for executing all API commands over HTTP. This method is
further used to implement wrapper methods listed here:
https://www.x.com/docs/DOC-1374
``method`` must be a supported NVP method listed at the above address.
``kwargs`` the actual call parameters
"""
post_params = self._get_call_params(method, **kwargs)
payload = post_params['data']
api_endpoint = post_params['url']
# This shows all of the key/val pairs we're sending to PayPal.
if logger.isEnabledFor(logging.DEBUG):
logger.debug('PayPal NVP Query Key/Vals:\n%s' % pformat(payload))
http_response = requests.post(**post_params)
response = PayPalResponse(http_response.text, self.config)
logger.debug('PayPal NVP API Endpoint: %s' % api_endpoint)
if not response.success:
logger.error('A PayPal API error was encountered.')
safe_payload = dict((p, 'X' * len(v) if p in \
self.__credentials else v) for (p, v) in payload.items())
logger.error('PayPal NVP Query Key/Vals (credentials removed):' \
'\n%s' % pformat(safe_payload))
logger.error('PayPal NVP Query Response')
logger.error(response)
raise PayPalAPIResponseError(response)
return response
def _get_call_params(self, method, **kwargs):
"""
Returns the prepared call parameters. Mind, these will be keyword
arguments to ``requests.post``.
``method`` the NVP method
``kwargs`` the actual call parameters
"""
payload = {'METHOD': method,
'VERSION': self.config.API_VERSION}
certificate = None
if self.config.API_AUTHENTICATION_MODE == "3TOKEN":
payload['USER'] = self.config.API_USERNAME
payload['PWD'] = self.config.API_PASSWORD
payload['SIGNATURE'] = self.config.API_SIGNATURE
elif self.config.API_AUTHENTICATION_MODE == "CERTIFICATE":
payload['USER'] = self.config.API_USERNAME
payload['PWD'] = self.config.API_PASSWORD
certificate = (self.config.API_CERTIFICATE_FILENAME,
self.config.API_KEY_FILENAME)
elif self.config.API_AUTHENTICATION_MODE == "UNIPAY":
payload['SUBJECT'] = self.config.UNIPAY_SUBJECT
none_configs = [config for config, value in payload.items()\
if value is None]
if none_configs:
raise PayPalConfigError(
"Config(s) %s cannot be None. Please, check this "
"interface's config." % none_configs)
# all keys in the payload must be uppercase
for key, value in kwargs.items():
payload[key.upper()] = value
return {'data': payload,
'cert': certificate,
'url': self.config.API_ENDPOINT,
'timeout': self.config.HTTP_TIMEOUT,
'verify': self.config.API_CA_CERTS}
def address_verify(self, email, street, zip):
"""Shortcut for the AddressVerify method.
``email``::
Email address of a PayPal member to verify.
Maximum string length: 255 single-byte characters
Input mask: ?@?.??
``street``::
First line of the billing or shipping postal address to verify.
To pass verification, the value of Street must match the first three
single-byte characters of a postal address on file for the PayPal member.
Maximum string length: 35 single-byte characters.
Alphanumeric plus - , . ' # \
Whitespace and case of input value are ignored.
``zip``::
Postal code to verify.
To pass verification, the value of Zip must match the first five
single-byte characters of the postal code of the verified postal
address for the verified PayPal member.
Maximum string length: 16 single-byte characters.
Whitespace and case of input value are ignored.
"""
args = self._sanitize_locals(locals())
return self._call('AddressVerify', **args)
def create_recurring_payments_profile(self, **kwargs):
"""Shortcut for the CreateRecurringPaymentsProfile method.
Currently, this method only supports the Direct Payment flavor.
It requires standard credit card information and a few additional
parameters related to the billing. e.g.:
profile_info = {
# Credit card information
'creditcardtype': 'Visa',
'acct': '4812177017895760',
'expdate': '102015',
'cvv2': '123',
'firstname': 'John',
'lastname': 'Doe',
'street': '1313 Mockingbird Lane',
'city': 'Beverly Hills',
'state': 'CA',
'zip': '90110',
'countrycode': 'US',
'currencycode': 'USD',
# Recurring payment information
'profilestartdate': '2010-10-25T0:0:0',
'billingperiod': 'Month',
'billingfrequency': '6',
'amt': '10.00',
'desc': '6 months of our product.'
}
response = create_recurring_payments_profile(**profile_info)
The above NVPs compose the bare-minimum request for creating a
profile. For the complete list of parameters, visit this URI:
https://www.x.com/docs/DOC-1168
"""
return self._call('CreateRecurringPaymentsProfile', **kwargs)
def do_authorization(self, transactionid, amt):
"""Shortcut for the DoAuthorization method.
Use the TRANSACTIONID from DoExpressCheckoutPayment for the
``transactionid``. The latest version of the API does not support the
creation of an Order from `DoDirectPayment`.
The `amt` should be the same as passed to `DoExpressCheckoutPayment`.
Flow for a payment involving a `DoAuthorization` call::
1. One or many calls to `SetExpressCheckout` with pertinent order
details, returns `TOKEN`
1. `DoExpressCheckoutPayment` with `TOKEN`, `PAYMENTACTION` set to
Order, `AMT` set to the amount of the transaction, returns
`TRANSACTIONID`
1. `DoAuthorization` with `TRANSACTIONID` and `AMT` set to the
amount of the transaction.
1. `DoCapture` with the `AUTHORIZATIONID` (the `TRANSACTIONID`
returned by `DoAuthorization`)
"""
args = self._sanitize_locals(locals())
return self._call('DoAuthorization', **args)
def do_capture(self, authorizationid, amt, completetype='Complete', **kwargs):
"""Shortcut for the DoCapture method.
Use the TRANSACTIONID from DoAuthorization, DoDirectPayment or
DoExpressCheckoutPayment for the ``authorizationid``.
The `amt` should be the same as the authorized transaction.
"""
kwargs.update(self._sanitize_locals(locals()))
return self._call('DoCapture', **kwargs)
def do_direct_payment(self, paymentaction="Sale", **kwargs):
"""Shortcut for the DoDirectPayment method.
``paymentaction`` could be 'Authorization' or 'Sale'
To issue a Sale immediately::
charge = {
'amt': '10.00',
'creditcardtype': 'Visa',
'acct': '4812177017895760',
'expdate': '012010',
'cvv2': '962',
'firstname': 'John',
'lastname': 'Doe',
'street': '1 Main St',
'city': 'San Jose',
'state': 'CA',
'zip': '95131',
'countrycode': 'US',
'currencycode': 'USD',
}
direct_payment("Sale", **charge)
Or, since "Sale" is the default:
direct_payment(**charge)
To issue an Authorization, simply pass "Authorization" instead of "Sale".
You may also explicitly set ``paymentaction`` as a keyword argument:
...
direct_payment(paymentaction="Sale", **charge)
"""
kwargs.update(self._sanitize_locals(locals()))
return self._call('DoDirectPayment', **kwargs)
def do_void(self, **kwargs):
"""Shortcut for the DoVoid method.
Use the TRANSACTIONID from DoAuthorization, DoDirectPayment or
DoExpressCheckoutPayment for the ``AUTHORIZATIONID``.
Required Kwargs
---------------
* AUTHORIZATIONID
"""
return self._call('DoVoid', **kwargs)
def get_express_checkout_details(self, **kwargs):
"""Shortcut for the GetExpressCheckoutDetails method.
Required Kwargs
---------------
* TOKEN
"""
return self._call('GetExpressCheckoutDetails', **kwargs)
def get_transaction_details(self, **kwargs):
"""Shortcut for the GetTransactionDetails method.
Use the TRANSACTIONID from DoAuthorization, DoDirectPayment or
DoExpressCheckoutPayment for the ``transactionid``.
Required Kwargs
---------------
* TRANSACTIONID
"""
return self._call('GetTransactionDetails', **kwargs)
def transaction_search(self, **kwargs):
"""Shortcut for the TransactionSearch method.
Returns a PayPalResponseList object, which merges the L_ syntax list
to a list of dictionaries with properly named keys.
Note that the API will limit returned transactions to 100.
Required Kwargs
---------------
* STARTDATE
Optional Kwargs
---------------
STATUS = one of ['Pending','Processing','Success','Denied','Reversed']
"""
plain = self._call('TransactionSearch', **kwargs)
return PayPalResponseList(plain.raw, self.config)
def set_express_checkout(self, **kwargs):
"""Start an Express checkout.
You'll want to use this in conjunction with
:meth:`generate_express_checkout_redirect_url` to create a payment,
then figure out where to redirect the user to for them to
authorize the payment on PayPal's website.
Required Kwargs
---------------
* PAYMENTREQUEST_0_AMT
* PAYMENTREQUEST_0_PAYMENTACTION
* RETURNURL
* CANCELURL
"""
return self._call('SetExpressCheckout', **kwargs)
def refund_transaction(self, transactionid=None, payerid=None, **kwargs):
"""Shortcut for RefundTransaction method.
Note: the new API supports passing a PayerID instead of a transaction ID; exactly one must be provided.
Optional:
INVOICEID
REFUNDTYPE
AMT
CURRENCYCODE
NOTE
RETRYUNTIL
REFUNDSOURCE
MERCHANTSTOREDETAILS
REFUNDADVICE
REFUNDITEMDETAILS
MSGSUBID
MERCHANTSTOREDETAILS has two fields:
STOREID
TERMINALID
"""
# Exactly one of transactionid / payerid must be supplied.
if (transactionid is None) and (payerid is None):
raise PayPalError('RefundTransaction requires either a transactionid or a payerid')
if (transactionid is not None) and (payerid is not None):
raise PayPalError('RefundTransaction requires only one of transactionid %s and payerid %s' % (transactionid, payerid))
if transactionid is not None:
kwargs['TRANSACTIONID'] = transactionid
else:
kwargs['PAYERID'] = payerid
return self._call('RefundTransaction', **kwargs)
def do_express_checkout_payment(self, **kwargs):
"""Finishes an Express checkout.
TOKEN is the token that was returned earlier by
:meth:`set_express_checkout`. This identifies the transaction.
Required
--------
* TOKEN
* PAYMENTACTION
* PAYERID
* AMT
"""
return self._call('DoExpressCheckoutPayment', **kwargs)
def generate_express_checkout_redirect_url(self, token, useraction=None):
"""Returns the URL to redirect the user to for the Express checkout.
Express Checkouts must be verified by the customer by redirecting them
to the PayPal website. Use the token returned in the response from
:meth:`set_express_checkout` with this function to figure out where
to redirect the user to.
The button text on the PayPal page can be controlled via `useraction`.
The documented possible values are `commit` and `continue`. However,
any other value will only result in a warning.
:param str token: The unique token identifying this transaction.
:param str useraction: Control the button text on the PayPal page.
:rtype: str
:returns: The URL to redirect the user to for approval.
"""
url_vars = (self.config.PAYPAL_URL_BASE, token)
url = "%s?cmd=_express-checkout&token=%s" % url_vars
if useraction:
if not useraction.lower() in ('commit', 'continue'):
warnings.warn('useraction=%s is not documented' % useraction,
RuntimeWarning)
url += '&useraction=%s' % useraction
return url
def generate_cart_upload_redirect_url(self, **kwargs):
"""https://www.sandbox.paypal.com/webscr
?cmd=_cart
&upload=1
"""
required_vals = ('business', 'item_name_1', 'amount_1', 'quantity_1')
self._check_required(required_vals, **kwargs)
url = "%s?cmd=_cart&upload=1" % self.config.PAYPAL_URL_BASE
additional = self._encode_utf8(**kwargs)
additional = urlencode(additional)
return url + "&" + additional
def get_recurring_payments_profile_details(self, profileid):
"""Shortcut for the GetRecurringPaymentsProfile method.
This returns details for a recurring payment plan. The ``profileid`` is
a value included in the response retrieved by the function
``create_recurring_payments_profile``. The profile details include the
data provided when the profile was created as well as default values
for ignored fields and some pertinent statistics.
e.g.:
response = create_recurring_payments_profile(**profile_info)
profileid = response.PROFILEID
details = get_recurring_payments_profile(profileid)
The response from PayPal is somewhat self-explanatory, but for a
description of each field, visit the following URI:
https://www.x.com/docs/DOC-1194
"""
args = self._sanitize_locals(locals())
return self._call('GetRecurringPaymentsProfileDetails', **args)
def manage_recurring_payments_profile_status(self, profileid, action, note=None):
"""Shortcut to the ManageRecurringPaymentsProfileStatus method.
``profileid`` is the same profile id used for getting profile details.
``action`` should be either 'Cancel', 'Suspend', or 'Reactivate'.
``note`` is optional and is visible to the user. It contains the reason for the change in status.
"""
args = self._sanitize_locals(locals())
if not note:
del args['note']
return self._call('ManageRecurringPaymentsProfileStatus', **args)
def update_recurring_payments_profile(self, profileid, **kwargs):
"""Shortcut to the UpdateRecurringPaymentsProfile method.
``profileid`` is the same profile id used for getting profile details.
The keyed arguments are data in the payment profile which you wish to
change. The profileid does not change. Anything else will take the new
value. Most of, though not all of, the fields available are shared
with creating a profile, but for the complete list of parameters, you
can visit the following URI:
https://www.x.com/docs/DOC-1212
"""
kwargs.update(self._sanitize_locals(locals()))
return self._call('UpdateRecurringPaymentsProfile', **kwargs)
def bm_create_button(self, **kwargs):
"""Shortcut to the BMButtonSearch method.
See the docs for details on arguments:
https://cms.paypal.com/mx/cgi-bin/?cmd=_render-content&content_ID=developer/e_howto_api_nvp_BMCreateButton
The L_BUTTONVARn fields are especially important, so make sure to
read those and act accordingly. See unit tests for some examples.
"""
kwargs.update(self._sanitize_locals(locals()))
return self._call('BMCreateButton', **kwargs)
|
AbrahmAB/sugar | refs/heads/master | extensions/cpsection/background/view.py | 3 | # Copyright (C) 2012 Agustin Zubiaga <[email protected]>
# Copyright (C) 2013 Sugar Labs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from gi.repository import GdkPixbuf
from sugar3.graphics import style
from sugar3.graphics.radiotoolbutton import RadioToolButton
from jarabe.controlpanel.sectionview import SectionView
from gettext import gettext as _
class Background(SectionView):
def __init__(self, model, alerts=None):
SectionView.__init__(self)
self._model = model
self._images_loaded = False
self._append_to_store_sid = None
self.connect('realize', self.__realize_cb)
self.connect('unrealize', self.__unrealize_cb)
self.set_border_width(style.DEFAULT_SPACING * 2)
self.set_spacing(style.DEFAULT_SPACING)
label_box = Gtk.Box()
label_bg = Gtk.Label(label=_('Select a background:'))
label_bg.modify_fg(Gtk.StateType.NORMAL,
style.COLOR_SELECTION_GREY.get_gdk_color())
label_bg.show()
label_box.pack_start(label_bg, False, True, 0)
label_box.show()
self.pack_start(label_box, False, True, 1)
clear_button = Gtk.Button()
clear_button.set_label(_('Clear background'))
clear_button.connect('clicked', self._clear_clicked_cb)
clear_button.show()
self.pack_end(clear_button, False, True, 0)
scrolled_window = Gtk.ScrolledWindow()
scrolled_window.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
scrolled_window.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
self.pack_start(scrolled_window, True, True, 0)
scrolled_window.show()
self._store = Gtk.ListStore(GdkPixbuf.Pixbuf, str)
self._icon_view = Gtk.IconView.new_with_model(self._store)
self._icon_view.set_selection_mode(Gtk.SelectionMode.SINGLE)
self._icon_view.connect('selection-changed', self._background_selected)
self._icon_view.set_pixbuf_column(0)
self._icon_view.grab_focus()
scrolled_window.add(self._icon_view)
self._icon_view.show()
alpha = self._model.get_background_alpha_level()
alpha_box = Gtk.HBox()
alpha_buttons = []
alpha_icons = [
[1.0, 'network-wireless-000'],
[0.8, 'network-wireless-020'],
[0.6, 'network-wireless-040'],
[0.4, 'network-wireless-060'],
[0.2, 'network-wireless-080']]
for value, icon_name in alpha_icons:
if len(alpha_buttons) > 0:
button = RadioToolButton(group=alpha_buttons[0])
else:
button = RadioToolButton(group=None)
button.set_icon_name(icon_name)
button.value = value
button.props.active = value == alpha
button.show()
alpha_box.pack_start(button, False, True, 0)
alpha_buttons.append(button)
for button in alpha_buttons:
button.connect('toggled', self._set_alpha_cb)
alpha_alignment = Gtk.Alignment()
alpha_alignment.set(0.5, 0, 0, 0)
alpha_alignment.add(alpha_box)
alpha_box.show()
self.pack_start(alpha_alignment, False, False, 0)
alpha_alignment.show()
self._paths_list = []
file_paths = []
for directory in self._model.BACKGROUNDS_DIRS:
if directory is not None and os.path.exists(directory):
for root, dirs, files in os.walk(directory):
for file_ in files:
file_paths.append(os.path.join(root, file_))
self._append_to_store(file_paths)
self.setup()
def _append_to_store(self, file_paths):
if file_paths:
file_path = file_paths.pop()
pixbuf = None
try:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
file_path, style.XLARGE_ICON_SIZE,
style.XLARGE_ICON_SIZE)
except GObject.GError:
pass
else:
self._store.append([pixbuf, file_path])
self._paths_list.append(file_path)
self._append_to_store_sid = GObject.idle_add(self._append_to_store,
file_paths)
else:
self._select_background()
self._images_loaded = True
window = self.get_window()
if window is not None:
window.set_cursor(None)
self._append_to_store_sid = None
def _cancel_append_to_store(self):
if self._append_to_store_sid is not None:
GObject.source_remove(self._append_to_store_sid)
self._append_to_store_sid = None
def __realize_cb(self, widget):
if self._images_loaded:
self.get_window().set_cursor(None)
else:
self.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.WATCH))
def __unrealize_cb(self, widget):
self.get_window().set_cursor(None)
def _set_alpha_cb(self, widget):
if widget.get_active():
self._model.set_background_alpha_level(widget.value)
def _get_selected_path(self, widget):
try:
iter_ = self._store.get_iter(widget.get_selected_items()[0])
image_path = self._store.get(iter_, 1)[0]
return image_path, iter_
except (IndexError, ValueError):
return None
def _background_selected(self, widget):
selected = self._get_selected_path(widget)
if selected is None:
return
image_path, _iter = selected
self._model.set_background_image_path(image_path)
def _select_background(self):
background = self._model.get_background_image_path()
if background in self._paths_list:
self._icon_view.select_path(
Gtk.TreePath.new_from_string(
'%s' % self._paths_list.index(background)))
def _clear_clicked_cb(self, widget, event=None):
self._model.set_background_image_path(None)
def setup(self):
self.show_all()
def apply(self):
self._cancel_append_to_store()
def undo(self):
self._model.undo()
self._cancel_append_to_store()
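# A minimal sketch of the idle-chunked loading pattern used by
# _append_to_store above, assuming PyGObject (GLib.idle_add is the modern
# equivalent of GObject.idle_add): handling one item per idle callback keeps
# the GTK main loop responsive while a long list loads.
from gi.repository import GLib
def idle_consume(items, on_item, on_done):
    def _step():
        if items:
            on_item(items.pop())
            return True  # reschedule: more items remain
        on_done()
        return False  # remove the idle source
    return GLib.idle_add(_step)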
|
hmgaudecker/econ-project-templates | refs/heads/master | docs/bld/example/python/python_example/.mywaflib/waflib/extras/waf_xattr.py | 55 | #! /usr/bin/env python
# encoding: utf-8
"""
Use extended attributes instead of database files
1. Input files will be made writable
2. This is only for systems providing extended filesystem attributes
3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below)
4. The module enables "deep_inputs" on all tasks by propagating task signatures
5. This module also skips task signature comparisons for task code changes due to point 4.
6. This module is for Python3/Linux only, but it could be extended to Python2/other systems
using the xattr library
7. For projects in which tasks always declare output files, it should be possible to
store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps)
but this is not done here
On a simple C++ project benchmark, the following changes were observed after adding waf_xattr.py:
total build time: 20s -> 22s
no-op build time: 2.4s -> 1.8s
pickle file size: 2.9MB -> 2.6MB
"""
import os
from waflib import Logs, Node, Task, Utils, Errors
from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING
HASH_CACHE = True
SIG_VAR = 'user.waf.sig'
SEP = ','.encode()
TEMPLATE = '%b%d,%d'.encode()
try:
PermissionError
except NameError:
PermissionError = IOError
def getxattr(self):
return os.getxattr(self.abspath(), SIG_VAR)
def setxattr(self, val):
os.setxattr(self.abspath(), SIG_VAR, val)
def h_file(self):
try:
ret = getxattr(self)
except OSError:
if HASH_CACHE:
st = os.stat(self.abspath())
mtime = st.st_mtime
size = st.st_size
else:
if len(ret) == 16:
# for build directory files
return ret
if HASH_CACHE:
# check if timestamp and mtime match to avoid re-hashing
st = os.stat(self.abspath())
mtime, size = ret[16:].split(SEP)
if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
return ret[:16]
ret = Utils.h_file(self.abspath())
if HASH_CACHE:
val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
try:
setxattr(self, val)
except PermissionError:
os.chmod(self.abspath(), st.st_mode | 128)
setxattr(self, val)
return ret
def runnable_status(self):
bld = self.generator.bld
if bld.is_install < 0:
return SKIP_ME
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
elif t.hasrun < SKIPPED:
# a dependency has an error
return CANCEL_ME
# first compute the signature
try:
new_sig = self.signature()
except Errors.TaskNotReady:
return ASK_LATER
if not self.outputs:
# compare the signature to a signature computed previously
# this part is only for tasks with no output files
key = self.uid()
try:
prev_sig = bld.task_sigs[key]
except KeyError:
Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
return RUN_ME
if new_sig != prev_sig:
Logs.debug('task: task %r must run: the task signature changed', self)
return RUN_ME
# compare the signatures of the outputs to make a decision
for node in self.outputs:
try:
sig = node.h_file()
except EnvironmentError:
Logs.debug('task: task %r must run: an output node does not exist', self)
return RUN_ME
if sig != new_sig:
Logs.debug('task: task %r must run: an output node is stale', self)
return RUN_ME
return (self.always_run and RUN_ME) or SKIP_ME
def post_run(self):
bld = self.generator.bld
sig = self.signature()
for node in self.outputs:
if not node.exists():
self.hasrun = MISSING
self.err_msg = '-> missing file: %r' % node.abspath()
raise Errors.WafError(self.err_msg)
os.setxattr(node.abspath(), 'user.waf.sig', sig)
if not self.outputs:
# only for task with no outputs
bld.task_sigs[self.uid()] = sig
if not self.keep_last_cmd:
try:
del self.last_cmd
except AttributeError:
pass
try:
os.getxattr
except AttributeError:
pass
else:
h_file.__doc__ = Node.Node.h_file.__doc__
# keep file hashes as file attributes
Node.Node.h_file = h_file
# enable "deep_inputs" on all tasks
Task.Task.runnable_status = runnable_status
Task.Task.post_run = post_run
Task.Task.sig_deep_inputs = Utils.nada
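# Standalone sketch of the hash-caching idea above (Linux and Python 3 only;
# the attribute name is an invented example): keep a file's digest in an
# extended attribute together with an mtime/size stamp, and re-hash only when
# the stamp no longer matches.
import hashlib
_ATTR = 'user.example.sha1'
def cached_sha1(path):
    st = os.stat(path)
    stamp = ('%d,%d' % (int(st.st_mtime * 1000), st.st_size)).encode()
    try:
        digest, _, old_stamp = os.getxattr(path, _ATTR).partition(b'|')
        if old_stamp == stamp:
            return digest  # cache hit: skip re-hashing
    except OSError:
        pass  # attribute missing or xattrs unsupported
    with open(path, 'rb') as h:
        digest = hashlib.sha1(h.read()).hexdigest().encode()
    try:
        os.setxattr(path, _ATTR, digest + b'|' + stamp)
    except OSError:
        pass  # e.g. read-only file; return without caching
    return digest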
|
smaffulli/libcloud | refs/heads/trunk | setup.py | 6 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import doctest
from setuptools import setup
from distutils.core import Command
from unittest import TextTestRunner, TestLoader
from glob import glob
from os.path import splitext, basename, join as pjoin
try:
import epydoc # NOQA
has_epydoc = True
except ImportError:
has_epydoc = False
import libcloud.utils
from libcloud.utils.dist import get_packages, get_data_files
libcloud.utils.SHOW_DEPRECATION_WARNING = False
# Different versions of python have different requirements. We can't use
# libcloud.utils.py3 here because it relies on backports dependency being
# installed / available
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY2_pre_25 = PY2 and sys.version_info < (2, 5)
PY2_pre_26 = PY2 and sys.version_info < (2, 6)
PY2_pre_27 = PY2 and sys.version_info < (2, 7)
PY2_pre_279 = PY2 and sys.version_info < (2, 7, 9)
PY3_pre_32 = PY3 and sys.version_info < (3, 2)
HTML_VIEWSOURCE_BASE = 'https://svn.apache.org/viewvc/libcloud/trunk'
PROJECT_BASE_DIR = 'http://libcloud.apache.org'
TEST_PATHS = ['libcloud/test', 'libcloud/test/common', 'libcloud/test/compute',
'libcloud/test/storage', 'libcloud/test/loadbalancer',
'libcloud/test/dns']
DOC_TEST_MODULES = ['libcloud.compute.drivers.dummy',
'libcloud.storage.drivers.dummy',
'libcloud.dns.drivers.dummy']
SUPPORTED_VERSIONS = ['2.5', '2.6', '2.7', 'PyPy', '3.x']
TEST_REQUIREMENTS = [
'mock'
]
if PY2_pre_279 or PY3_pre_32:
TEST_REQUIREMENTS.append('backports.ssl_match_hostname')
if PY2_pre_27:
unittest2_required = True
else:
unittest2_required = False
if PY2_pre_25:
version = '.'.join([str(x) for x in sys.version_info[:3]])
print('Version ' + version + ' is not supported. Supported versions are ' +
', '.join(SUPPORTED_VERSIONS))
sys.exit(1)
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from libcloud import __version__
version = __version__
sys.path.pop(0)
return version
def forbid_publish():
argv = sys.argv
if 'upload' in argv:
print('You shouldn\'t use upload command to upload a release to PyPi. '
'You need to manually upload files generated using release.sh '
'script.\n'
'For more information, see "Making a release section" in the '
'documentation')
sys.exit(1)
class TestCommand(Command):
description = "run test suite"
user_options = []
def initialize_options(self):
THIS_DIR = os.path.abspath(os.path.split(__file__)[0])
sys.path.insert(0, THIS_DIR)
for test_path in TEST_PATHS:
sys.path.insert(0, pjoin(THIS_DIR, test_path))
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
for module_name in TEST_REQUIREMENTS:
try:
__import__(module_name)
except ImportError:
print('Missing "%s" library. %s is library is needed '
'to run the tests. You can install it using pip: '
'pip install %s' % (module_name, module_name,
module_name))
sys.exit(1)
if unittest2_required:
try:
import unittest2
unittest2  # silence pyflakes
except ImportError:
print('Python version: %s' % (sys.version))
print('Missing "unittest2" library. unittest2 is library is '
'needed to run the tests. You can install it using pip: '
'pip install unittest2')
sys.exit(1)
status = self._run_tests()
sys.exit(status)
def _run_tests(self):
secrets_current = pjoin(self._dir, 'libcloud/test', 'secrets.py')
secrets_dist = pjoin(self._dir, 'libcloud/test', 'secrets.py-dist')
if not os.path.isfile(secrets_current):
print("Missing " + secrets_current)
print("Maybe you forgot to copy it from -dist:")
print("cp libcloud/test/secrets.py-dist libcloud/test/secrets.py")
sys.exit(1)
mtime_current = os.path.getmtime(secrets_current)
mtime_dist = os.path.getmtime(secrets_dist)
if mtime_dist > mtime_current:
print("It looks like test/secrets.py file is out of date.")
print("Please copy the new secrets.py-dist file over otherwise" +
" tests might fail")
if PY2_pre_26:
missing = []
# test for dependencies
try:
import simplejson
simplejson # silence pyflakes
except ImportError:
missing.append("simplejson")
try:
import ssl
ssl # silence pyflakes
except ImportError:
missing.append("ssl")
if missing:
print("Missing dependencies: " + ", ".join(missing))
sys.exit(1)
testfiles = []
for test_path in TEST_PATHS:
for t in glob(pjoin(self._dir, test_path, 'test_*.py')):
testfiles.append('.'.join(
[test_path.replace('/', '.'), splitext(basename(t))[0]]))
tests = TestLoader().loadTestsFromNames(testfiles)
for test_module in DOC_TEST_MODULES:
tests.addTests(doctest.DocTestSuite(test_module))
t = TextTestRunner(verbosity=2)
res = t.run(tests)
return not res.wasSuccessful()
class ApiDocsCommand(Command):
description = "generate API documentation"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if not has_epydoc:
raise RuntimeError('Missing "epydoc" package!')
os.system(
'pydoctor'
' --add-package=libcloud'
' --project-name=libcloud'
' --make-html'
' --html-viewsource-base="%s"'
' --project-base-dir=`pwd`'
' --project-url="%s"'
% (HTML_VIEWSOURCE_BASE, PROJECT_BASE_DIR))
class CoverageCommand(Command):
description = "run test suite and generate coverage report"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import coverage
cov = coverage.coverage(config_file='.coveragerc')
cov.start()
tc = TestCommand(self.distribution)
tc._run_tests()
cov.stop()
cov.save()
cov.html_report()
forbid_publish()
install_requires = []
if PY2_pre_26:
install_requires.extend(['ssl', 'simplejson'])
if PY2_pre_279 or PY3_pre_32:
install_requires.append('backports.ssl_match_hostname')
setup(
name='apache-libcloud',
version=read_version_string(),
description='A standard Python library that abstracts away differences' +
' among multiple cloud provider APIs. For more information' +
' and documentation, please see http://libcloud.apache.org',
author='Apache Software Foundation',
author_email='[email protected]',
install_requires=install_requires,
packages=get_packages('libcloud'),
package_dir={
'libcloud': 'libcloud',
},
package_data={'libcloud': get_data_files('libcloud', parent='libcloud')},
license='Apache License (2.0)',
url='http://libcloud.apache.org/',
cmdclass={
'test': TestCommand,
'apidocs': ApiDocsCommand,
'coverage': CoverageCommand
},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy']
)
|
md1024/rams | refs/heads/init_master | uber/api.py | 1 | from uber.common import *
__version__ = 'v0.1'
attendee_fields = [
'full_name', 'first_name', 'last_name', 'email', 'zip_code', 'cellphone', 'ec_phone', 'badge_status_label', 'checked_in',
'badge_type_label', 'ribbon_label', 'staffing', 'is_dept_head', 'assigned_depts_labels', 'weighted_hours', 'worked_hours',
'badge_num'
]
fields = dict({
'food_restrictions': {
'sandwich_pref_labels': True,
'standard_labels': True,
'freeform': True
},
'shifts': {
'worked_label': True,
'job': ['type_label', 'location_label', 'name', 'description', 'start_time', 'end_time', 'extra15']
}
}, **{field: True for field in attendee_fields})
class AttendeeLookup:
def lookup(self, badge_num):
with Session() as session:
attendee = session.query(Attendee).filter_by(badge_num=badge_num).first()
return attendee.to_dict(fields) if attendee else {'error': 'No attendee found with Badge #{}'.format(badge_num)}
def search(self, query):
with Session() as session:
return [a.to_dict(fields) for a in session.search(query).all()]
services.register(AttendeeLookup(), 'attendee')
job_fields = dict({
'name': True,
'description': True,
'location': True,
'start_time': True,
'end_time': True,
'duration': True,
'shifts': {
'attendee': {
'badge_num': True,
'full_name': True,
'first_name': True,
'last_name': True,
'email': True,
'cellphone': True,
'badge_printed_name': True
}
}
})
class JobLookup:
def lookup(self, location):
with Session() as session:
columns = Job.__table__.columns
location_column = columns['location']
label_lookup = {val: key for key, val in location_column.type.choices.items()}
return [job.to_dict(job_fields) for job in session.query(Job).filter_by(location=label_lookup[location]).all()]
services.register(JobLookup(), 'shifts')
class DepartmentLookup:
def list(self):
with Session() as session:
output = {}
for dept in c.JOB_LOCATION_VARS:
output[dept] = dict(c.JOB_LOCATION_OPTS)[getattr(c, dept)]
return output
services.register(DepartmentLookup(), 'dept')
config_fields = [
'EVENT_NAME',
'ORGANIZATION_NAME',
'YEAR',
'EPOCH',
'EVENT_VENUE',
'EVENT_VENUE_ADDRESS',
'AT_THE_CON',
'POST_CON',
]
class ConfigLookup:
def info(self):
output = {
'API_VERSION': __version__
}
for field in config_fields:
output[field] = getattr(c, field)
return output
def lookup(self, field):
if field.upper() in config_fields:
return getattr(c, field.upper())
services.register(ConfigLookup(), 'config')
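# Illustrative usage (return values depend on the deployed config object `c`):
# the registered ConfigLookup answers field queries, upper-casing the name so
# lookups are case-insensitive.
#
# ConfigLookup().info()['API_VERSION'] -> 'v0.1'
# ConfigLookup().lookup('event_name') -> c.EVENT_NAME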
|
wkschwartz/django | refs/heads/stable/3.2.x | django/core/management/commands/sqlmigrate.py | 29 | from django.apps import apps
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.migrations.loader import AmbiguityError, MigrationLoader
class Command(BaseCommand):
help = "Prints the SQL statements for the named migration."
output_transaction = True
def add_arguments(self, parser):
parser.add_argument('app_label', help='App label of the application containing the migration.')
parser.add_argument('migration_name', help='Migration name to print the SQL for.')
parser.add_argument(
'--database', default=DEFAULT_DB_ALIAS,
help='Nominates a database to create SQL for. Defaults to the "default" database.',
)
parser.add_argument(
'--backwards', action='store_true',
help='Creates SQL to unapply the migration, rather than to apply it',
)
def execute(self, *args, **options):
# sqlmigrate doesn't support coloring its output but we need to force
# no_color=True so that the BEGIN/COMMIT statements added by
# output_transaction don't get colored either.
options['no_color'] = True
return super().execute(*args, **options)
def handle(self, *args, **options):
# Get the database we're operating from
connection = connections[options['database']]
        # Load up a loader to get all the migration data, but don't replace
# migrations.
loader = MigrationLoader(connection, replace_migrations=False)
# Resolve command-line arguments into a migration
app_label, migration_name = options['app_label'], options['migration_name']
# Validate app_label
try:
apps.get_app_config(app_label)
except LookupError as err:
raise CommandError(str(err))
if app_label not in loader.migrated_apps:
raise CommandError("App '%s' does not have migrations" % app_label)
try:
migration = loader.get_migration_by_prefix(app_label, migration_name)
except AmbiguityError:
raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (
migration_name, app_label))
except KeyError:
raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (
migration_name, app_label))
target = (app_label, migration.name)
# Show begin/end around output for atomic migrations, if the database
# supports transactional DDL.
self.output_transaction = migration.atomic and connection.features.can_rollback_ddl
# Make a plan that represents just the requested migrations and show SQL
# for it
plan = [(loader.graph.nodes[target], options['backwards'])]
sql_statements = loader.collect_sql(plan)
if not sql_statements and options['verbosity'] >= 1:
self.stderr.write('No operations found.')
return '\n'.join(sql_statements)
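# Example invocations (standard Django management-command usage; ``myapp``,
# ``0001``, and ``replica`` are placeholders for a real app label, migration
# prefix, and database alias):
#
#   python manage.py sqlmigrate myapp 0001
#   python manage.py sqlmigrate myapp 0001 --backwards --database=replica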
|
JohnLunzer/flexx | refs/heads/master | flexx/app/_server.py | 1 | """
High level code related to the server that provides a mainloop and
serves the pages and websocket. Also provides call_later().
"""
from ..event import _loop
from .. import config
# There is always a single current server (except initially there is None)
_current_server = None
def create_server(host=None, port=None, new_loop=False, backend='tornado',
**server_kwargs):
"""
Create a new server object. This is automatically called; users generally
don't need this, unless they want to explicitly specify host/port,
create a fresh server in testing scenarios, or run Flexx in a thread.
Flexx uses a notion of a single current server object. This function
(re)creates that object. If there already was a server object, it is
replaced. It is an error to call this function if the current server
is still running.
Arguments:
host (str): The hostname to serve on. By default
``flexx.config.hostname`` is used. If ``False``, do not listen
(e.g. when integrating with an existing Tornado application).
port (int, str): The port number. If a string is given, it is
hashed to an ephemeral port number. By default
``flexx.config.port`` is used.
        new_loop (bool): Whether to create a fresh Tornado IOLoop instance,
            which is made current when ``start()`` is called. If ``False``
            (default), the current IOLoop for this thread is used.
backend (str): Stub argument; only Tornado is currently supported.
**server_kwargs: keyword arguments passed to the server constructor.
Returns:
server: The server object, see ``current_server()``.
"""
# Lazy load tornado, so that we can use anything we want there without
# preventing other parts of flexx.app from using *this* module.
from ._tornadoserver import TornadoServer # noqa - circular dependency
global _current_server
if backend.lower() != 'tornado':
raise RuntimeError('Flexx server can only run on Tornado (for now).')
# Handle defaults
if host is None:
host = config.hostname
if port is None:
port = config.port
# Stop old server
if _current_server:
_current_server.close()
# Start hosting
_current_server = TornadoServer(host, port, new_loop, **server_kwargs)
assert isinstance(_current_server, AbstractServer)
# Schedule pending calls
_current_server.call_later(0, _loop.loop.iter)
while _pending_call_laters:
delay, callback, args, kwargs = _pending_call_laters.pop(0)
call_later(delay, callback, *args, **kwargs)
return _current_server
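# Minimal usage sketch (assumes the Tornado backend is importable):
#
#   server = create_server(host='localhost', port=8080)
#   server.start()  # runs the event loop; returns only after stop() is called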
def current_server(create=True):
"""
Get the current server object. Creates a server if there is none
and the ``create`` arg is True. Currently, this is always a
TornadoServer object, which has properties:
* serving: a tuple ``(hostname, port)`` specifying the location
being served (or ``None`` if the server is closed).
* app: the ``tornado.web.Application`` instance
* loop: the ``tornado.ioloop.IOLoop`` instance
* server: the ``tornado.httpserver.HttpServer`` instance
"""
if create and not _current_server:
create_server()
return _current_server
def call_later(delay, callback, *args, **kwargs):
"""
Schedule a function call in the current event loop. This function is
thread safe.
Arguments:
delay (float): the delay in seconds. If zero, the callback will
be executed in the next event loop iteration.
callback (callable): the function to call.
args: the positional arguments to call the callback with.
kwargs: the keyword arguments to call the callback with.
"""
server = current_server(False)
if not server:
_pending_call_laters.append((delay, callback, args, kwargs))
else:
server.call_later(delay, callback, *args, **kwargs)
_pending_call_laters = []
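# Usage sketch; ``tick`` is an illustrative callback, not part of flexx:
#
#   def tick(msg):
#       print(msg)
#   call_later(0, tick, 'next loop iteration')
#   call_later(1.0, tick, 'about one second after the loop starts')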
# Integrate the "event-loop" of flexx.event
_loop.loop.integrate(lambda f: call_later(0, f))
## Server class
class AbstractServer:
""" This is an attempt to generalize the server, so that in the
future we may have e.g. a Flask or Pyramid server.
A server must implement this, and use the manager to instantiate,
connect and disconnect sessions. The assets object must be used to
    serve assets to the client.
Arguments:
host (str): the hostname to serve at
port (int): the port to serve at. None or 0 mean to autoselect a port.
"""
def __init__(self, host, port, **kwargs):
self._serving = None
if host is not False:
self._open(host, port, **kwargs)
assert self._serving # Check that subclass set private variable
self._running = False
def start(self):
""" Start the event loop. """
if not self._serving:
raise RuntimeError('Cannot start a closed or non-serving server!')
if self._running:
raise RuntimeError('Cannot start a running server.')
self._running = True
try:
self._start()
finally:
self._running = False
def stop(self):
""" Stop the event loop. This does not close the connection; the server
can be restarted. Thread safe. """
self.call_later(0, self._stop)
def close(self):
""" Close the connection. A closed server cannot be used again. """
if self._running:
raise RuntimeError('Cannot close a running server; need to stop first.')
self._serving = None
self._close()
def _open(self, host, port, **kwargs):
raise NotImplementedError()
def _start(self):
raise NotImplementedError()
def _stop(self):
raise NotImplementedError()
def _close(self):
raise NotImplementedError()
    # This method must be implemented directly for performance (it's used a lot).
def call_later(self, delay, callback, *args, **kwargs):
""" Call a function in a later event loop iteration. """
raise NotImplementedError()
@property
def serving(self):
""" Get a tuple (hostname, port) that is being served.
Or None if the server is not serving (anymore).
"""
return self._serving
@property
def protocol(self):
""" Get a string representing served protocol
"""
        raise NotImplementedError()
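# Illustrative (hypothetical) subclass sketch showing the contract this base
# class expects of implementations such as TornadoServer:
#
#   class EchoServer(AbstractServer):
#       def _open(self, host, port, **kwargs):
#           self._serving = (host, port or 49152)  # subclasses must set this
#       def _start(self):
#           ...  # enter the event loop; return only once _stop() has run
#       def _stop(self):
#           ...  # break out of the event loop (the server stays open)
#       def _close(self):
#           ...  # release sockets; a closed server cannot be reused
#       def call_later(self, delay, callback, *args, **kwargs):
#           ...  # schedule callback on the loop after ``delay`` seconds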
|
looker/sentry | refs/heads/master | src/sentry/south_migrations/0323_auto__add_unique_releaseenvironment_organization_id_release_id_environ.py | 4 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from sentry.utils.db import is_postgres
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Adding unique constraint on 'ReleaseEnvironment', fields ['organization_id', 'release_id', 'environment_id']
        if is_postgres():
            # CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
            # so close South's implicit transaction, build the index, then
            # reopen a transaction for the rest of the migration.
            db.commit_transaction()
            db.execute(
                "CREATE UNIQUE INDEX CONCURRENTLY {} ON sentry_environmentrelease "
                "(organization_id, release_id, environment_id)".format(
                    db.create_index_name(
                        'sentry_environmentrelease',
                        ['organization_id', 'release_id', 'environment_id']
                    ),
                )
            )
            db.start_transaction()
else:
db.create_unique(
'sentry_environmentrelease', ['organization_id', 'release_id', 'environment_id']
)
    def backwards(self, orm):
        # Removing unique constraint on 'ReleaseEnvironment', fields ['organization_id', 'release_id', 'environment_id']
        if is_postgres():
            # DROP INDEX CONCURRENTLY likewise has to run outside a
            # transaction block, so close and reopen the transaction around it.
            db.commit_transaction()
            db.execute(
                "DROP INDEX CONCURRENTLY {}".format(
                    db.create_index_name(
                        'sentry_environmentrelease',
                        ['organization_id', 'release_id', 'environment_id']
                    ),
                )
            )
            db.start_transaction()
else:
db.delete_unique(
'sentry_environmentrelease', ['organization_id', 'release_id', 'environment_id']
)
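    # For reference, on Postgres ``forwards`` emits SQL of roughly this shape
    # (the exact index name comes from ``db.create_index_name`` and will vary):
    #
    #   CREATE UNIQUE INDEX CONCURRENTLY <index_name>
    #       ON sentry_environmentrelease (organization_id, release_id, environment_id);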
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apiapplication': {
'Meta': {
'object_name': 'ApiApplication'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'client_id': (
'django.db.models.fields.CharField', [], {
'default': "'814a02bea1b544debabf1d3805b3d497aad7031fdd514ef38dcda3ddd487b49b'",
'unique': 'True',
'max_length': '64'
}
),
'client_secret': (
'sentry.db.models.fields.encrypted.EncryptedTextField', [], {
'default': "'68ad6170a6da46d8abe22cebcd5613b6c6bda4f6342046f39e2c645d75860b58'"
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'homepage_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'default': "'Sound Goat'",
'max_length': '64',
'blank': 'True'
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'privacy_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'terms_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.apiauthorization': {
'Meta': {
'unique_together': "(('user', 'application'),)",
'object_name': 'ApiAuthorization'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apigrant': {
'Meta': {
'object_name': 'ApiGrant'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']"
}
),
'code': (
'django.db.models.fields.CharField', [], {
'default': "'9359da52947946dda41ca7a0ff1cd9d2'",
'max_length': '64',
'db_index': 'True'
}
),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 5, 22, 0, 0)',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'redirect_uri': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 6, 21, 0, 0)',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'refresh_token': (
'django.db.models.fields.CharField', [], {
'default': "'4e29b11467f74b9e8393f1b3b0a8a50ee6399946f3d64189af65b2187890105e'",
'max_length': '64',
'unique': 'True',
'null': 'True'
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token': (
'django.db.models.fields.CharField', [], {
'default': "'3403b38f441d45ee9c1ef9850e5e270ae36f4e2d00364859b3f16e44c6462b60'",
'unique': 'True',
'max_length': '64'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config':
('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 5, 29, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together':
"(('organization_id', 'email'), ('organization_id', 'external_id'))",
'object_name':
'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '164',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.commitfilechange': {
'Meta': {
'unique_together': "(('commit', 'filename'),)",
'object_name': 'CommitFileChange'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'filename': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '1'
})
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {
'object_name': 'Deploy'
},
'date_finished':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'notified': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'db_index': 'True',
'blank': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.distribution': {
'Meta': {
'unique_together': "(('release', 'name'),)",
'object_name': 'Distribution'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.dsymapp': {
'Meta': {
'unique_together': "(('project', 'platform', 'app_id'),)",
'object_name': 'DSymApp'
},
'app_id': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'sync_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Project']",
'through': "orm['sentry.EnvironmentProject']",
'symmetrical': 'False'
}
)
},
'sentry.environmentproject': {
'Meta': {
'unique_together': "(('project', 'environment'),)",
'object_name': 'EnvironmentProject'
},
'environment': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Environment']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {
'unique_together': "(('raw_event', 'processing_issue'),)",
'object_name': 'EventProcessingIssue'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'processing_issue': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProcessingIssue']"
}
),
'raw_event': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.RawEvent']"
}
)
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupcommitresolution': {
'Meta': {
'unique_together': "(('group_id', 'commit_id'),)",
'object_name': 'GroupCommitResolution'
},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group_id', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationavatar': {
'Meta': {
'object_name': 'OrganizationAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.Organization']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {
'unique_together': "(('project', 'checksum', 'type'),)",
'object_name': 'ProcessingIssue'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'db_index': 'True'
}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '30'
})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'flags':
('django.db.models.fields.BigIntegerField', [], {
'default': '0',
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'rate_limit_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'rate_limit_window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'RawEvent'
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.release': {
'Meta': {
'unique_together': "(('organization', 'version'),)",
'object_name': 'Release'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'releases'",
'symmetrical': 'False',
'through': "orm['sentry.ReleaseProject']",
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together':
"(('project_id', 'release_id', 'environment_id'), ('organization_id', 'release_id', 'environment_id'))",
'object_name':
'ReleaseEnvironment',
'db_table':
"'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'dist': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Distribution']",
'null': 'True'
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseheadcommit': {
'Meta': {
'unique_together': "(('repository_id', 'release'),)",
'object_name': 'ReleaseHeadCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {
'unique_together': "(('project', 'release'),)",
'object_name': 'ReleaseProject',
'db_table': "'sentry_release_project'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together':
"(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))",
'object_name':
'Repository'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'provider':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'url': ('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.reprocessingreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'ReprocessingReport'
},
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'9NyuyDmJAjit916rhM3jH4xWOWxvtNlQ'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.versiondsymfile': {
'Meta': {
'unique_together': "(('dsym_file', 'version', 'build'),)",
'object_name': 'VersionDSymFile'
},
'build':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'dsym_app': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymApp']"
}
),
'dsym_file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProjectDSymFile']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '32'
})
}
}
complete_apps = ['sentry']
|
Work4Labs/lettuce | refs/heads/master | tests/integration/lib/Django-1.2.5/django/contrib/gis/maps/__init__.py | 12133432 | |
franosincic/edx-platform | refs/heads/master | openedx/core/djangoapps/credentials/migrations/__init__.py | 12133432 | |
salty-horse/scummvm | refs/heads/master | devtools/tasmrecover/tasm/__init__.py | 12133432 | |
cpollard1001/FreeCAD_sf_master | refs/heads/master | src/Tools/MakeMacBundleRelocatable.py | 16 | import os
import sys
import subprocess
import pprint
SYS_PATHS = ["/System/", "/usr/lib/"]
class LibraryNotFound(Exception):
pass
class Node:
"""
self.path should be an absolute path to self.name
"""
def __init__(self, name, path="", children=None):
self.name = name
self.path = path
if not children:
children = list()
self.children = children
self._marked = False
def __eq__(self, other):
if not isinstance(other, Node):
return False
return self.name == other.name
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.name)
class DepsGraph:
graph = {}
def in_graph(self, node):
return node.name in self.graph.keys()
def add_node(self, node):
self.graph[node.name] = node
def get_node(self, name):
        if name in self.graph:
return self.graph[name]
return None
def visit(self, operation, op_args=[]):
""""
Preform a depth first visit of the graph, calling operation
on each node.
"""
stack = []
for k in self.graph.keys():
self.graph[k]._marked = False
for k in self.graph.keys():
if not self.graph[k]._marked:
stack.append(k)
while stack:
node_key = stack.pop()
self.graph[node_key]._marked = True
for ck in self.graph[node_key].children:
if not self.graph[ck]._marked:
stack.append(ck)
operation(self, self.graph[node_key], *op_args)
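# Illustrative sketch (not part of the original script): visit() applies
# `operation(graph, node, *op_args)` to every node in depth-first order,
# which is how copy_into_bundle and add_rpaths below are driven. A minimal
# hypothetical operation:
#
#   def print_dep(graph, node, prefix):
#       print prefix + os.path.join(node.path, node.name)
#
#   graph = DepsGraph()
#   graph.add_node(Node("libfoo.dylib", "/usr/local/lib"))
#   graph.visit(print_dep, ["dep: "])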
def is_macho(path):
output = subprocess.check_output(["file", path])
if output.count("Mach-O") != 0:
return True
return False
def is_system_lib(lib):
for p in SYS_PATHS:
if lib.startswith(p):
return True
return False
def get_path(name, search_paths):
for path in search_paths:
if os.path.isfile(os.path.join(path, name)):
return path
return None
def list_install_names(path_macho):
output = subprocess.check_output(["otool", "-L", path_macho])
lines = output.split("\t")
libs = []
    #first line is the filename, and if it is a library, the second line
    #is its install name
if path_macho.endswith(os.path.basename(lines[1].split(" (")[0])):
lines = lines[2:]
else:
lines = lines[1:]
for line in lines:
lib = line.split(" (")[0]
if not is_system_lib(lib):
libs.append(lib)
return libs
def library_paths(install_names, search_paths):
paths = []
for name in install_names:
path = os.path.dirname(name)
lib_name = os.path.basename(name)
if path == "" or name[0] == "@":
#not absolute -- we need to find the path of this lib
path = get_path(lib_name, search_paths)
paths.append(os.path.join(path, lib_name))
return paths
def create_dep_nodes(install_names, search_paths):
"""
Return a list of Node objects from the provided install names.
"""
nodes = []
for lib in install_names:
install_path = os.path.dirname(lib)
lib_name = os.path.basename(lib)
#even if install_path is absolute, see if library can be found by
#searching search_paths, so that we have control over what library
#location to use
path = get_path(lib_name, search_paths)
if install_path != "" and lib[0] != "@":
            #we have an absolute path install name
if not path:
path = install_path
if not path:
            raise LibraryNotFound(lib_name + " not found in given paths")
nodes.append(Node(lib_name, path))
return nodes
def paths_at_depth(prefix, paths, depth):
filtered = []
for p in paths:
dirs = os.path.join(prefix, p).strip('/').split('/')
if len(dirs) == depth:
filtered.append(p)
return filtered
def should_visit(prefix, path_filters, path):
s_path = path.strip('/').split('/')
filters = []
#we only want to use filters if they have the same parent as path
for rel_pf in path_filters:
pf = os.path.join(prefix, rel_pf)
if os.path.split(pf)[0] == os.path.split(path)[0]:
filters.append(pf)
if not filters:
#no filter that applies to this path
return True
for pf in filters:
s_filter = pf.strip('/').split('/')
length = len(s_filter)
matched = 0
for i in range(len(s_path)):
if s_path[i] == s_filter[i]:
matched += 1
if matched == length or matched == len(s_path):
return True
return False
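# Illustration of the filter semantics above (paths are hypothetical):
#
#   should_visit("/App/Contents", ["bin", "lib"], "/App/Contents/lib")    # -> True
#   should_visit("/App/Contents", ["bin", "lib"], "/App/Contents/share")  # -> False
#
# The second call returns False because filters share that parent directory
# but none of them matches the path component-by-component.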
def build_deps_graph(graph, bundle_path, dirs_filter=None, search_paths=[]):
"""
Walk bundle_path and build a graph of the encountered Mach-O binaries
    and their dependencies.
"""
#make a local copy since we add to it
s_paths = list(search_paths)
visited = {}
for root, dirs, files in os.walk(bundle_path):
if dirs_filter != None:
dirs[:] = [d for d in dirs if should_visit(bundle_path, dirs_filter,
os.path.join(root, d))]
s_paths.insert(0, root)
for f in files:
fpath = os.path.join(root, f)
ext = os.path.splitext(f)[1]
if ( (ext == "" and is_macho(fpath)) or
ext == ".so" or ext == ".dylib" ):
visited[fpath] = False
stack = []
for k in visited.keys():
if not visited[k]:
stack.append(k)
while stack:
k2 = stack.pop()
visited[k2] = True
node = Node(os.path.basename(k2), os.path.dirname(k2))
if not graph.in_graph(node):
graph.add_node(node)
deps = create_dep_nodes(list_install_names(k2), s_paths)
for d in deps:
if d.name not in node.children:
node.children.append(d.name)
dk = os.path.join(d.path, d.name)
if dk not in visited.keys():
visited[dk] = False
if not visited[dk]:
stack.append(dk)
def in_bundle(lib, bundle_path):
if lib.startswith(bundle_path):
return True
return False
def copy_into_bundle(graph, node, bundle_path):
if not in_bundle(node.path, bundle_path):
subprocess.check_call(["cp", "-L", os.path.join(node.path, node.name),
os.path.join(bundle_path, "lib", node.name)])
node.path = os.path.join(bundle_path, "lib")
#fix permissions
subprocess.check_call(["chmod", "a+w", os.path.join(bundle_path,
"lib", node.name)])
def add_rpaths(graph, node, bundle_path):
if node.children:
lib = os.path.join(node.path, node.name)
if in_bundle(lib, bundle_path):
install_names = list_install_names(lib)
rpaths = []
for install_name in install_names:
name = os.path.basename(install_name)
#change install names to use rpaths
subprocess.check_call(["install_name_tool", "-change",
install_name, "@rpath/" + name, lib])
dep_node = node.children[node.children.index(name)]
rel_path = os.path.relpath(graph.get_node(dep_node).path,
node.path)
rpath = ""
if rel_path == ".":
rpath = "@loader_path/"
else:
rpath = "@loader_path/" + rel_path + "/"
if rpath not in rpaths:
rpaths.append(rpath)
for path in rpaths:
subprocess.call(["install_name_tool", "-add_rpath", path, lib])
def main():
if len(sys.argv) < 2:
print "Usage " + sys.argv[0] + " path [additional search paths]"
quit()
path = sys.argv[1]
bundle_path = os.path.abspath(os.path.join(path, "Contents"))
graph = DepsGraph()
dir_filter = ["bin", "lib", "Mod", "Mod/PartDesign",
"lib/python2.7/site-packages",
"lib/python2.7/lib-dynload"]
search_paths = [bundle_path + "/lib"] + sys.argv[2:]
build_deps_graph(graph, bundle_path, dir_filter, search_paths)
graph.visit(copy_into_bundle, [bundle_path])
graph.visit(add_rpaths, [bundle_path])
if __name__ == "__main__":
main()
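# Typical invocation, with hypothetical paths (argument 1 is the .app bundle;
# any further arguments are extra library search paths):
#
#   python MakeMacBundleRelocatable.py FreeCAD.app /usr/local/lib /opt/local/lib
#
# This copies external dylibs into Contents/lib and rewrites their install
# names into @rpath references.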
|
nuagenetworks/vspk-python | refs/heads/master | vspk/v5_0/fetchers/nudestinationurls_fetcher.py | 2 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from bambou import NURESTFetcher
class NUDestinationurlsFetcher(NURESTFetcher):
""" Represents a NUDestinationurls fetcher
Notes:
        This fetcher enables fetching NUDestinationurl objects.
See:
bambou.NURESTFetcher
"""
@classmethod
def managed_class(cls):
""" Return NUDestinationurl class that is managed.
Returns:
.NUDestinationurl: the managed class
"""
from .. import NUDestinationurl
return NUDestinationurl
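# Hypothetical usage sketch, following the usual bambou fetcher pattern (the
# parent object, session setup, and exact attribute names are assumed):
#
#   fetcher = NUDestinationurlsFetcher.fetcher_with_object(parent_object=parent)
#   destination_urls = fetcher.get()  # retrieves child NUDestinationurl objects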
|
mdietrichc2c/OCB | refs/heads/8.0 | addons/hw_proxy/controllers/main.py | 71 | # -*- coding: utf-8 -*-
import logging
import commands
import simplejson
import os
import os.path
import openerp
import time
import random
import subprocess
import werkzeug
import werkzeug.wrappers
_logger = logging.getLogger(__name__)
from openerp import http
from openerp.http import request
# drivers modules must add to drivers an object with a get_status() method
# so that 'status' can return the status of all active drivers
drivers = {}
class Proxy(http.Controller):
def get_status(self):
statuses = {}
for driver in drivers:
statuses[driver] = drivers[driver].get_status()
return statuses
@http.route('/hw_proxy/hello', type='http', auth='none', cors='*')
def hello(self):
return "ping"
@http.route('/hw_proxy/handshake', type='json', auth='none', cors='*')
def handshake(self):
return True
@http.route('/hw_proxy/status', type='http', auth='none', cors='*')
def status_http(self):
resp = """
<!DOCTYPE HTML>
<html>
<head>
<title>Odoo's PosBox</title>
<style>
body {
width: 480px;
margin: 60px auto;
font-family: sans-serif;
text-align: justify;
color: #6B6B6B;
}
.device {
border-bottom: solid 1px rgb(216,216,216);
padding: 9px;
}
.device:nth-child(2n) {
background:rgb(240,240,240);
}
</style>
</head>
<body>
<h1>Hardware Status</h1>
<p>The list of enabled drivers and their status</p>
"""
statuses = self.get_status()
for driver in statuses:
status = statuses[driver]
if status['status'] == 'connecting':
color = 'black'
elif status['status'] == 'connected':
color = 'green'
else:
color = 'red'
resp += "<h3 style='color:"+color+";'>"+driver+' : '+status['status']+"</h3>\n"
resp += "<ul>\n"
for msg in status['messages']:
resp += '<li>'+msg+'</li>\n'
resp += "</ul>\n"
resp += """
<h2>Connected Devices</h2>
<p>The list of connected USB devices as seen by the posbox</p>
"""
devices = commands.getoutput("lsusb").split('\n')
resp += "<div class='devices'>\n"
for device in devices:
device_name = device[device.find('ID')+2:]
resp+= "<div class='device' data-device='"+device+"'>"+device_name+"</div>\n"
resp += "</div>\n"
resp += """
<h2>Add New Printer</h2>
<p>
Copy and paste your printer's device description in the form below. You can find
your printer's description in the device list above. If you find that your printer works
well, please send your printer's description to <a href='mailto:[email protected]'>
[email protected]</a> so that we can add it to the default list of supported devices.
</p>
<form action='/hw_proxy/escpos/add_supported_device' method='GET'>
<input type='text' style='width:400px' name='device_string' placeholder='123a:b456 Sample Device description' />
<input type='submit' value='submit' />
</form>
<h2>Reset To Defaults</h2>
<p>If the added devices cause problems, you can <a href='/hw_proxy/escpos/reset_supported_devices'>Reset the
device list to factory default.</a> This operation cannot be undone.</p>
"""
resp += "</body>\n</html>\n\n"
return request.make_response(resp,{
'Cache-Control': 'no-cache',
'Content-Type': 'text/html; charset=utf-8',
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
})
@http.route('/hw_proxy/status_json', type='json', auth='none', cors='*')
def status_json(self):
return self.get_status()
@http.route('/hw_proxy/scan_item_success', type='json', auth='none', cors='*')
def scan_item_success(self, ean):
"""
A product has been scanned with success
"""
print 'scan_item_success: ' + str(ean)
@http.route('/hw_proxy/scan_item_error_unrecognized', type='json', auth='none', cors='*')
def scan_item_error_unrecognized(self, ean):
"""
A product has been scanned without success
"""
print 'scan_item_error_unrecognized: ' + str(ean)
@http.route('/hw_proxy/help_needed', type='json', auth='none', cors='*')
def help_needed(self):
"""
The user wants an help (ex: light is on)
"""
print "help_needed"
@http.route('/hw_proxy/help_canceled', type='json', auth='none', cors='*')
def help_canceled(self):
"""
The user stops the help request
"""
print "help_canceled"
@http.route('/hw_proxy/payment_request', type='json', auth='none', cors='*')
def payment_request(self, price):
"""
The PoS will activate the method payment
"""
print "payment_request: price:"+str(price)
return 'ok'
@http.route('/hw_proxy/payment_status', type='json', auth='none', cors='*')
def payment_status(self):
print "payment_status"
return { 'status':'waiting' }
@http.route('/hw_proxy/payment_cancel', type='json', auth='none', cors='*')
def payment_cancel(self):
print "payment_cancel"
@http.route('/hw_proxy/transaction_start', type='json', auth='none', cors='*')
def transaction_start(self):
print 'transaction_start'
@http.route('/hw_proxy/transaction_end', type='json', auth='none', cors='*')
def transaction_end(self):
print 'transaction_end'
@http.route('/hw_proxy/cashier_mode_activated', type='json', auth='none', cors='*')
def cashier_mode_activated(self):
print 'cashier_mode_activated'
@http.route('/hw_proxy/cashier_mode_deactivated', type='json', auth='none', cors='*')
def cashier_mode_deactivated(self):
print 'cashier_mode_deactivated'
@http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*')
def open_cashbox(self):
print 'open_cashbox'
@http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*')
def print_receipt(self, receipt):
print 'print_receipt' + str(receipt)
@http.route('/hw_proxy/is_scanner_connected', type='json', auth='none', cors='*')
    def is_scanner_connected(self):
print 'is_scanner_connected?'
return False
@http.route('/hw_proxy/scanner', type='json', auth='none', cors='*')
    def scanner(self):
print 'scanner'
time.sleep(10)
return ''
@http.route('/hw_proxy/log', type='json', auth='none', cors='*')
def log(self, arguments):
_logger.info(' '.join(str(v) for v in arguments))
@http.route('/hw_proxy/print_pdf_invoice', type='json', auth='none', cors='*')
def print_pdf_invoice(self, pdfinvoice):
print 'print_pdf_invoice' + str(pdfinvoice)
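# Sketch of the driver protocol described at the top of this module (the
# driver below is hypothetical, not shipped with this addon): a hardware
# driver registers an object exposing get_status() in `drivers`, and
# /hw_proxy/status then reports on it.
#
#   from openerp.addons.hw_proxy.controllers import main as hw_proxy
#
#   class DummyDriver(object):
#       def get_status(self):
#           return {'status': 'connected', 'messages': []}
#
#   hw_proxy.drivers['dummy'] = DummyDriver()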
|
suzuki-no-suke/colledb | refs/heads/master | src/do_both_server.py | 1 | from bottle import route, get, post, abort, run, template, static_file, request
import bottle.ext.sqlite
import bottle
import os
import uuid
import datetime
import codecs
# app config
## server address
SERVER_ADDRESS = '192.168.1.0'
SERVER_PORTNO = 80
if "SERVER_ADDRESS" in os.environ:
SERVER_ADDRESS = os.environ["SERVER_ADDRESS"]
print("Server -> {}:{}".format(SERVER_ADDRESS, SERVER_PORTNO))
## api server address
API_SERVER_ADDRESS = 'localhost'
if "API_SERVER_ADDRESS" in os.environ:
API_SERVER_ADDRESS = os.environ["API_SERVER_ADDRESS"]
print("API server -> {}:80".format(API_SERVER_ADDRESS))
## root path
ROOT_PATH = "."
if "ROOT_PATH" in os.environ:
ROOT_PATH = os.environ["ROOT_PATH"]
print("root path -> {}".format(ROOT_PATH))
DATA_PATH = os.path.abspath(ROOT_PATH + "/data")
DB_PATH = os.path.abspath(ROOT_PATH + "/data/data.db")
# install sqlite3 plugin
app = bottle.Bottle()
plugin = bottle.ext.sqlite.Plugin(DB_PATH)
app.install(plugin)
# set template path
bottle.TEMPLATE_PATH.insert(0, os.path.abspath(ROOT_PATH + "/view/"))
# routing
@app.route('/')
@app.route('/app/list')
def show_list(db):
#db.text_factory = str
# gether book information
rows = db.execute("SELECT * FROM books").fetchall()
book_list = []
if rows:
for r in rows:
title = r['title'].encode('utf-8')
author = r['author'].encode('utf-8')
tags = r['tags'].encode('utf-8')
#print("result -> {} / t {}, au {}, tag {}".format(
# r['id'], title, author, tags))
book = {}
book['id'] = r['id']
# make summary
summary = title + " / " + author + " / " + tags
# TODO : fix below : it does not work
#summary = "{} - {} - {}".format(
# title[:50], author[:20], tags[:20])
#print("summary -> {} / type -> {}".format(summary, type(summary)))
book['summary'] = summary
# find image
img_no = -1
for no in range(1, 5):
img_key = 'image{}'.format(no)
if r[img_key] and not r[img_key].isspace():
img_no = no
break # for no loop
img_url = ""
if img_no >= 1:
img_url = "/image/{}/{}".format(book['id'], img_no)
book['img_src'] = img_url
book_list.append(book)
return template('page_list', book_list=book_list)
# no books, or error
temporary_top = """
<p>No books found, or an error occurred.</p>
<p>Go to the <a href="/app/add">add form</a>.</p>
"""[1:-1]
return temporary_top
@app.get('/app/book/<book_id:int>')
def show_book(book_id, db):
print("book page request => {}".format(book_id))
# gether book information
row = db.execute('SELECT * FROM books WHERE id = ?', (book_id,)).fetchone()
if row:
book = {}
book['id'] = row['id']
book['title'] = row['title']
book['author'] = row['author']
book['tags'] = row['tags']
# gether image information
img_nos = []
for no in range(1, 5):
img_key = "image{}".format(no)
if row[img_key] and not row[img_key].isspace():
img_nos.insert(0, no)
return template('page_show_book', book=book, image_nos=img_nos)
return abort(404, "book id {} is not found.".format(id))
@app.post('/app/book/<book_id:int>')
def update_book_and_show(book_id, db):
print("edit book - posted {}".format(book_id))
# TODO : "PUT" is better, but, do so ?
# utf-8 workaround
db.text_factory = str
# gether book info
b_id = book_id
b_title = request.forms.get('title')
b_author = request.forms.get('author')
b_tags = request.forms.get('tags')
b_update = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
# image process
updated_images = {}
    # TODO: factor this image-saving loop into a helper function or method
for no in range(1, 5):
img_key = "image{}".format(no)
if img_key in request.files:
image = request.files.get(img_key)
ext = os.path.splitext(image.filename)
imgid = uuid.uuid4()
b_filename = str(imgid) + ext[1]
print("save file name -> {}".format(b_filename))
img_path = os.path.abspath(DATA_PATH + "/" + b_filename)
with open(img_path, "wb") as fimg:
fimg.write(image.file.read())
updated_images[img_key] = b_filename
# update book info
image_all_key = ['image{}'.format(no) for no in range(1, 5)]
img_query_key = [key for key in image_all_key if key in updated_images.keys()]
img_id_value = tuple(updated_images[key] for key in img_query_key)
query_value = (b_title, b_author, b_tags, b_update,)
all_query_key = ['title', 'author', 'tags', 'updated'] + img_query_key
query = """
UPDATE books SET
{} = ?
WHERE id = ?
"""[1:-1].format(' = ?,'.join(all_query_key))
print("query => {}".format(query))
value_tuple = query_value + img_id_value + (b_id,)
print("value -> {}".format(value_tuple))
db.execute(query, value_tuple)
db.commit()
# gether book info
row = db.execute("SELECT * FROM books WHERE id = ?", (b_id,)).fetchone()
if row:
book = {}
book['id'] = row['id']
book['title'] = row['title']
book['author'] = row['author']
book['tags'] = row['tags']
for no in range(1, 5):
imgkey = "image{}".format(no)
book[imgkey] = row[imgkey]
return template('page_edit_book', book=book)
# on error
return abort(404, "book id {} is not found.".format(id))
@app.get('/app/edit/<book_id:int>')
def edit_book_form(book_id, db):
print("edit book {}".format(book_id))
# utf-8 workaround
db.text_factory = str
# gether book info
row = db.execute("SELECT * FROM books WHERE id = ?", (book_id,)).fetchone()
if row:
book = {}
book['id'] = row['id']
book['title'] = row['title']
book['author'] = row['author']
book['tags'] = row['tags']
for no in range(1, 5):
imgkey = "image{}".format(no)
book[imgkey] = row[imgkey]
return template('page_edit_book', book=book)
# on error
return abort(404, "book id {} is not found.".format(id))
@app.get('/app/add')
def add_newbook_form():
return template('page_add_book')
@app.post('/app/add')
def add_newbook_and_show_next_form(db):
# utf-8 workaround
db.text_factory = str
# for debug
print("forms")
for k, v in request.forms.items():
print("{} -> {}".format(k,v))
print("files")
for k, v in request.files.items():
print("{} -> {}".format(k,v))
# gether post information
b_title = request.forms.get('title')
b_author = request.forms.get('author')
b_tags = request.forms.get('tags')
# save files
image_keys = ['image1', 'image2', 'image3', 'image4']
file_ids = {}
for imkey in image_keys:
if imkey in request.files:
print("key {} - found".format(imkey))
img = request.files.get(imkey)
print("filename -> {}".format(img.filename))
img_name = img.filename
if img_name and not img_name.isspace():
print("filename -> {}".format(img_name))
ext = os.path.splitext(img_name)
im_uuid = uuid.uuid4()
im_filename = str(im_uuid) + ext[1]
print("save file name -> {}".format(im_filename))
im_path = os.path.abspath(DATA_PATH + "/" + im_filename)
with open(im_path, "wb") as fimg:
fimg.write(img.file.read())
file_ids[imkey] = im_filename
# query keys
img_query_keys = [key for key in image_keys if key in file_ids.keys()]
img_file_names = tuple(file_ids[key] for key in img_query_keys)
# build datetime
now = datetime.datetime.now()
b_created = now.strftime('%Y-%m-%d %H:%M:%S')
b_updated = now.strftime('%Y-%m-%d %H:%M:%S')
# build and execute query
all_keys = ['title', 'author', 'tags', 'created', 'updated'] + img_query_keys
query_marks = ['?' for k in all_keys]
    query_param_tuple = (b_title, b_author, b_tags, b_created, b_updated,) \
+ img_file_names
query = """
INSERT INTO books (
{}
) VALUES (
{}
)
"""[1:-1]
filled_query = query.format(
','.join(all_keys),
','.join(query_marks))
print("filled query -> " + filled_query)
db.execute(filled_query, query_param_tuple)
db.commit()
# show new form
return template('page_add_book')
@app.get('/image/<book_id:int>/<image_no:int>')
def get_book_image(book_id, image_no, db):
print("request -> book {} / image {}".format(book_id, image_no))
# check image_no
if not (image_no >= 1 and image_no <= 4):
        return abort(404, 'image - wrong image_no')
# get image uuid
img_key = "image{}".format(image_no)
query = 'SELECT {} AS img_id FROM books WHERE id = ?'.format(img_key)
print("image query -> {}".format(query))
row = db.execute(query, (book_id,)).fetchone()
if row:
img_id = row['img_id']
file_path = os.path.abspath(DATA_PATH + "/" + img_id)
print("file -> {}".format(file_path))
if os.path.exists(file_path):
# TODO : not safe
return static_file(img_id, root=DATA_PATH)
abort(404, "file not found")
@app.error(404)
def on_404(error):
return '404, <a href="/">goto toppage</a>' + error.body
app.run(host=SERVER_ADDRESS, port=SERVER_PORTNO)
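# Example launch using the environment overrides read above (values are
# placeholders):
#
#   SERVER_ADDRESS=0.0.0.0 ROOT_PATH=/srv/colledb python do_both_server.py
#
# With ROOT_PATH=/srv/colledb, the SQLite database is expected at
# /srv/colledb/data/data.db and uploaded images are written under
# /srv/colledb/data/.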
|
anhstudios/swganh | refs/heads/develop | data/scripts/templates/object/static/structure/tatooine/shared_shed_junkshop_watto.py | 2 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/structure/tatooine/shared_shed_junkshop_watto.iff"
result.attribute_template_id = -1
result.stfName("string_table","shed_junkshop_watto")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
cjaymes/pyscap | refs/heads/master | src/scap/model/ocil_2_0/QuestionnaireIDPattern.py | 1 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.model.xs.AnySimpleType import AnySimpleType
class QuestionnaireIDPattern(AnySimpleType):
pass
|
Ingenico-ePayments/connect-sdk-python2 | refs/heads/master | examples/merchant/payments/get_third_party_status_example.py | 2 | #
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
import os
from ingenico.connect.sdk.factory import Factory
class GetThirdPartyStatusExample(object):
def example(self):
with self.__get_client() as client:
response = client.merchant("merchantId").payments().third_party_status("paymentId")
def __get_client(self):
api_key_id = os.getenv("connect.api.apiKeyId", "someKey")
secret_api_key = os.getenv("connect.api.secretApiKey", "someSecret")
configuration_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../../example_configuration.ini'))
return Factory.create_client_from_file(configuration_file_name=configuration_file_name,
api_key_id=api_key_id, secret_api_key=secret_api_key)
|
Sir-Henry-Curtis/Ironworks | refs/heads/master | builtinPlugins/sickbeard.py | 3 | from flask import jsonify, render_template, request, send_file, json
import urllib2
import base64
import StringIO
from maraschino import app
from maraschino.tools import *
import maraschino
def sickbeard_http():
if get_setting_value('sickbeard_https') == '1':
return 'https://'
else:
return 'http://'
def sickbeard_url():
port = get_setting_value('sickbeard_port')
url_base = get_setting_value('sickbeard_ip')
webroot = get_setting_value('sickbeard_webroot')
if port:
url_base = '%s:%s' % (url_base, port)
if webroot:
url_base = '%s/%s' % (url_base, webroot)
url = '%s/api/%s' % (url_base, get_setting_value('sickbeard_api'))
return sickbeard_http() + url
def sickbeard_url_no_api():
port = get_setting_value('sickbeard_port')
url_base = get_setting_value('sickbeard_ip')
webroot = get_setting_value('sickbeard_webroot')
if port:
url_base = '%s:%s' % (url_base, port)
if webroot:
url_base = '%s/%s' % (url_base, webroot)
return sickbeard_http() + url_base
def sickbeard_api(params=None, use_json=True, dev=False):
username = get_setting_value('sickbeard_user')
password = get_setting_value('sickbeard_password')
url = sickbeard_url() + params
r = urllib2.Request(url)
if username and password:
base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
r.add_header("Authorization", "Basic %s" % base64string)
data = urllib2.urlopen(r).read()
if dev:
print url
print data
if use_json:
data = json.JSONDecoder().decode(data)
return data
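# Example of what the helper above produces (host, port and API key are
# placeholders): with ip 'localhost', port '8081' and key 'abc123', the
# params '/?cmd=sb.ping' request http://localhost:8081/api/abc123/?cmd=sb.ping
# and the decoded JSON is returned.
#
#   pong = sickbeard_api('/?cmd=sb.ping')
#   if pong['result'] == 'success':
#       print 'Sick Beard is reachable'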
@app.route('/xhr/sickbeard/')
def xhr_sickbeard():
params = '/?cmd=future&sort=date'
try:
sickbeard = sickbeard_api(params)
compact_view = get_setting_value('sickbeard_compact') == '1'
show_airdate = get_setting_value('sickbeard_airdate') == '1'
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
for time in sickbeard:
for episode in sickbeard[time]:
episode['image'] = get_pic(episode['tvdbid'], 'banner')
except:
return render_template('sickbeard.html',
sickbeard='',
)
return render_template('sickbeard.html',
url=sickbeard_url_no_api(),
sickbeard=sickbeard,
missed=sickbeard['missed'],
today=sickbeard['today'],
soon=sickbeard['soon'],
later=sickbeard['later'],
compact_view=compact_view,
show_airdate=show_airdate,
)
@app.route('/sickbeard/search_ep/<tvdbid>/<season>/<episode>/')
@requires_auth
def search_ep(tvdbid, season, episode):
params = '/?cmd=episode.search&tvdbid=%s&season=%s&episode=%s' % (tvdbid, season, episode)
try:
sickbeard = sickbeard_api(params)
return jsonify(sickbeard)
except:
return jsonify({'result': False})
@app.route('/sickbeard/get_plot/<tvdbid>/<season>/<episode>/')
def get_plot(tvdbid, season, episode):
params = '/?cmd=episode&tvdbid=%s&season=%s&episode=%s' % (tvdbid, season, episode)
try:
sickbeard = sickbeard_api(params)
return sickbeard['data']['description']
except:
return ''
@app.route('/sickbeard/get_all/')
def get_all():
params = '/?cmd=shows&sort=name'
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
for show in sickbeard:
sickbeard[show]['url'] = get_pic(sickbeard[show]['tvdbid'], 'banner')
return render_template('sickbeard/all.html',
sickbeard=sickbeard,
)
@app.route('/sickbeard/get_show_info/<tvdbid>/')
def show_info(tvdbid):
params = '/?cmd=show&tvdbid=%s' % tvdbid
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
sickbeard['url'] = get_pic(tvdbid, 'banner')
sickbeard['tvdb'] = tvdbid
return render_template('sickbeard/show.html',
sickbeard=sickbeard,
)
@app.route('/sickbeard/get_season/<tvdbid>/<season>/')
def get_season(tvdbid, season):
params = '/?cmd=show.seasons&tvdbid=%s&season=%s' % (tvdbid, season)
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
return render_template('sickbeard/season.html',
sickbeard=sickbeard,
id=tvdbid,
season=season,
)
@app.route('/sickbeard/history/<limit>/')
def history(limit):
params = '/?cmd=history&limit=%s' % limit
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
for show in sickbeard:
show['image'] = get_pic(show['tvdbid'])
return render_template('sickbeard/history.html',
sickbeard=sickbeard,
)
# returns a link with the path to the required image from SB
def get_pic(tvdb, style='banner'):
return '%s/sickbeard/get_%s/%s' % (maraschino.WEBROOT, style, tvdb)
@app.route('/sickbeard/get_ep_info/<tvdbid>/<season>/<ep>/')
def get_episode_info(tvdbid, season, ep):
params = '/?cmd=episode&tvdbid=%s&season=%s&episode=%s&full_path=1' % (tvdbid, season, ep)
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
return render_template('sickbeard/episode.html',
sickbeard=sickbeard,
id=tvdbid,
season=season,
ep=ep,
)
@app.route('/sickbeard/set_ep_status/<tvdbid>/<season>/<ep>/<st>/')
def set_episode_status(tvdbid, season, ep, st):
params = '/?cmd=episode.setstatus&tvdbid=%s&season=%s&episode=%s&status=%s' % (tvdbid, season, ep, st)
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
status = 'error'
if sickbeard['result'] == 'success':
status = 'success'
return jsonify({'status': status})
@app.route('/sickbeard/shutdown/')
def shutdown():
params = '/?cmd=sb.shutdown'
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
@app.route('/sickbeard/restart/')
def restart():
params = '/?cmd=sb.restart'
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
@app.route('/sickbeard/search/')
def sb_search():
sickbeard = {}
params = ''
try:
params = '&name=%s' % (urllib2.quote(request.args['name']))
except:
pass
try:
params = '&tvdbid=%s' % (urllib2.quote(request.args['tvdbid']))
except:
pass
try:
params = '&lang=%s' % (urllib2.quote(request.args['lang']))
except:
pass
    if params != '':
params = '/?cmd=sb.searchtvdb%s' % params
try:
sickbeard = sickbeard_api(params)
sickbeard = sickbeard['data']['results']
except:
sickbeard = None
else:
sickbeard = None
return render_template('sickbeard/search.html',
data=sickbeard,
sickbeard='results',
)
@app.route('/sickbeard/add_show/<tvdbid>/')
def add_show(tvdbid):
params = '/?cmd=show.addnew&tvdbid=%s' % tvdbid
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
@app.route('/sickbeard/get_banner/<tvdbid>/')
def get_banner(tvdbid):
params = '/?cmd=show.getbanner&tvdbid=%s' % tvdbid
img = StringIO.StringIO(sickbeard_api(params, use_json=False))
return send_file(img, mimetype='image/jpeg')
@app.route('/sickbeard/get_poster/<tvdbid>/')
def get_poster(tvdbid):
params = '/?cmd=show.getposter&tvdbid=%s' % tvdbid
img = StringIO.StringIO(sickbeard_api(params, use_json=False))
return send_file(img, mimetype='image/jpeg')
@app.route('/sickbeard/log/<level>/')
def log(level):
params = '/?cmd=logs&min_level=%s' % level
try:
sickbeard = sickbeard_api(params)
if sickbeard['result'].rfind('success') >= 0:
sickbeard = sickbeard['data']
if not sickbeard:
sickbeard = ['The %s log is empty' % level]
except:
sickbeard = None
return render_template('sickbeard/log.html',
sickbeard=sickbeard,
level=level,
)
@app.route('/sickbeard/delete_show/<tvdbid>/')
def delete_show(tvdbid):
params = '/?cmd=show.delete&tvdbid=%s' % tvdbid
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
@app.route('/sickbeard/refresh_show/<tvdbid>/')
def refresh_show(tvdbid):
params = '/?cmd=show.refresh&tvdbid=%s' % tvdbid
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
@app.route('/sickbeard/update_show/<tvdbid>/')
def update_show(tvdbid):
params = '/?cmd=show.update&tvdbid=%s' % tvdbid
try:
sickbeard = sickbeard_api(params)
except:
raise Exception
return sickbeard['message']
|
shtouff/django | refs/heads/master | django/views/generic/list.py | 471 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import InvalidPage, Paginator
from django.db.models.query import QuerySet
from django.http import Http404
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
class MultipleObjectMixin(ContextMixin):
"""
A mixin for views manipulating multiple objects.
"""
allow_empty = True
queryset = None
model = None
paginate_by = None
paginate_orphans = 0
context_object_name = None
paginator_class = Paginator
page_kwarg = 'page'
ordering = None
def get_queryset(self):
"""
Return the list of items for this view.
The return value must be an iterable and may be an instance of
`QuerySet` in which case `QuerySet` specific behavior will be enabled.
"""
if self.queryset is not None:
queryset = self.queryset
if isinstance(queryset, QuerySet):
queryset = queryset.all()
elif self.model is not None:
queryset = self.model._default_manager.all()
else:
raise ImproperlyConfigured(
"%(cls)s is missing a QuerySet. Define "
"%(cls)s.model, %(cls)s.queryset, or override "
"%(cls)s.get_queryset()." % {
'cls': self.__class__.__name__
}
)
ordering = self.get_ordering()
if ordering:
if isinstance(ordering, six.string_types):
ordering = (ordering,)
queryset = queryset.order_by(*ordering)
return queryset
def get_ordering(self):
"""
Return the field or fields to use for ordering the queryset.
"""
return self.ordering
def paginate_queryset(self, queryset, page_size):
"""
Paginate the queryset, if needed.
"""
paginator = self.get_paginator(
queryset, page_size, orphans=self.get_paginate_orphans(),
allow_empty_first_page=self.get_allow_empty())
page_kwarg = self.page_kwarg
page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
try:
page = paginator.page(page_number)
return (paginator, page, page.object_list, page.has_other_pages())
except InvalidPage as e:
raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
'page_number': page_number,
'message': str(e)
})
def get_paginate_by(self, queryset):
"""
Get the number of items to paginate by, or ``None`` for no pagination.
"""
return self.paginate_by
def get_paginator(self, queryset, per_page, orphans=0,
allow_empty_first_page=True, **kwargs):
"""
Return an instance of the paginator for this view.
"""
return self.paginator_class(
queryset, per_page, orphans=orphans,
allow_empty_first_page=allow_empty_first_page, **kwargs)
def get_paginate_orphans(self):
"""
        Returns the maximum number of orphans to extend the last page by
        when paginating.
"""
return self.paginate_orphans
def get_allow_empty(self):
"""
Returns ``True`` if the view should display empty lists, and ``False``
if a 404 should be raised instead.
"""
return self.allow_empty
def get_context_object_name(self, object_list):
"""
Get the name of the item to be used in the context.
"""
if self.context_object_name:
return self.context_object_name
elif hasattr(object_list, 'model'):
return '%s_list' % object_list.model._meta.model_name
else:
return None
def get_context_data(self, **kwargs):
"""
Get the context for this view.
"""
queryset = kwargs.pop('object_list', self.object_list)
page_size = self.get_paginate_by(queryset)
context_object_name = self.get_context_object_name(queryset)
if page_size:
paginator, page, queryset, is_paginated = self.paginate_queryset(queryset, page_size)
context = {
'paginator': paginator,
'page_obj': page,
'is_paginated': is_paginated,
'object_list': queryset
}
else:
context = {
'paginator': None,
'page_obj': None,
'is_paginated': False,
'object_list': queryset
}
if context_object_name is not None:
context[context_object_name] = queryset
context.update(kwargs)
return super(MultipleObjectMixin, self).get_context_data(**context)
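# Illustrative sketch (not part of Django): a concrete view built on this
# mixin, such as ListView below, only needs to declare `model` or `queryset`
# and, optionally, `paginate_by`; get_context_data() then exposes `paginator`,
# `page_obj`, `is_paginated` and `object_list`, plus a model-derived alias
# such as `article_list` for a hypothetical Article model.
#
#     class ArticleListView(ListView):
#         model = Article
#         paginate_by = 25
#         ordering = '-pub_date'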
class BaseListView(MultipleObjectMixin, View):
"""
A base view for displaying a list of objects.
"""
def get(self, request, *args, **kwargs):
self.object_list = self.get_queryset()
allow_empty = self.get_allow_empty()
if not allow_empty:
# When pagination is enabled and object_list is a queryset,
# it's better to do a cheap query than to load the unpaginated
# queryset in memory.
if (self.get_paginate_by(self.object_list) is not None
and hasattr(self.object_list, 'exists')):
is_empty = not self.object_list.exists()
else:
is_empty = len(self.object_list) == 0
if is_empty:
raise Http404(_("Empty list and '%(class_name)s.allow_empty' is False.")
% {'class_name': self.__class__.__name__})
context = self.get_context_data()
return self.render_to_response(context)
class MultipleObjectTemplateResponseMixin(TemplateResponseMixin):
"""
Mixin for responding with a template and list of objects.
"""
template_name_suffix = '_list'
def get_template_names(self):
"""
Return a list of template names to be used for the request. Must return
a list. May not be called if render_to_response is overridden.
"""
try:
names = super(MultipleObjectTemplateResponseMixin, self).get_template_names()
except ImproperlyConfigured:
# If template_name isn't specified, it's not a problem --
# we just start with an empty list.
names = []
# If the list is a queryset, we'll invent a template name based on the
# app and model name. This name gets put at the end of the template
# name list so that user-supplied names override the automatically-
# generated ones.
if hasattr(self.object_list, 'model'):
opts = self.object_list.model._meta
names.append("%s/%s%s.html" % (opts.app_label, opts.model_name, self.template_name_suffix))
return names
class ListView(MultipleObjectTemplateResponseMixin, BaseListView):
"""
Render some list of objects, set by `self.model` or `self.queryset`.
`self.queryset` can actually be any iterable of items, not just a queryset.
"""
|
alimanfoo/csvvalidator | refs/heads/master | tests.py | 2 | """
Tests for the `csvvalidator` module.
"""
import logging
import math
from csvvalidator import CSVValidator, VALUE_CHECK_FAILED, MESSAGES,\
HEADER_CHECK_FAILED, RECORD_LENGTH_CHECK_FAILED, enumeration, match_pattern,\
search_pattern, number_range_inclusive, number_range_exclusive,\
VALUE_PREDICATE_FALSE, RECORD_PREDICATE_FALSE, UNIQUE_CHECK_FAILED,\
ASSERT_CHECK_FAILED, UNEXPECTED_EXCEPTION, write_problems, datetime_string,\
RECORD_CHECK_FAILED, datetime_range_inclusive, datetime_range_exclusive,\
RecordError
# logging setup
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(levelname)s - %(funcName)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
debug, info, warning, error = logger.debug, logger.info, logger.warning, logger.error
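# NB: the module-level test functions below follow nose-style conventions;
# they are typically collected by a runner such as nose or pytest, e.g.
# `nosetests tests.py` (the choice of runner is an assumption, not mandated).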
def test_value_checks():
"""Some very simple tests of value checks."""
# a simple validator to be tested
    field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_value_check('foo', int)
validator.add_value_check('bar', float)
# some test data
data = (
('foo', 'bar'), # row 1 - header row
('12', '3.4'), # row 2 - valid
('1.2', '3.4'), # row 3 - foo invalid
('abc', '3.4'), # row 4 - foo invalid
('12', 'abc'), # row 5 - bar invalid
('', '3.4'), # row 6 - foo invalid (empty)
('12', ''), # row 7 - bar invalid (empty)
('abc', 'def') # row 8 - both invalid
)
# run the validator on the test data
problems = validator.validate(data)
assert len(problems) == 7
# N.B., expect row and column indices start from 1
problems_row2 = [p for p in problems if p['row'] == 2]
assert len(problems_row2) == 0 # should be valid
problems_row3 = [p for p in problems if p['row'] == 3]
assert len(problems_row3) == 1
p = problems_row3[0] # convenience variable
assert p['column'] == 1 # report column index
assert p['field'] == 'foo' # report field name
assert p['code'] == VALUE_CHECK_FAILED # default problem code for value checks
assert p['message'] == MESSAGES[VALUE_CHECK_FAILED] # default message
assert p['value'] == '1.2' # report bad value
assert p['record'] == ('1.2', '3.4') # report record
problems_row4 = [p for p in problems if p['row'] == 4]
assert len(problems_row4) == 1
p = problems_row4[0] # convenience variable
assert p['column'] == 1
assert p['field'] == 'foo'
assert p['code'] == VALUE_CHECK_FAILED
assert p['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p['value'] == 'abc'
assert p['record'] == ('abc', '3.4')
problems_row5 = [p for p in problems if p['row'] == 5]
assert len(problems_row5) == 1
p = problems_row5[0] # convenience variable
assert p['column'] == 2
assert p['field'] == 'bar'
assert p['code'] == VALUE_CHECK_FAILED
assert p['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p['value'] == 'abc'
assert p['record'] == ('12', 'abc')
problems_row6 = [p for p in problems if p['row'] == 6]
assert len(problems_row6) == 1
p = problems_row6[0] # convenience variable
assert p['column'] == 1
assert p['field'] == 'foo'
assert p['code'] == VALUE_CHECK_FAILED
assert p['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p['value'] == ''
assert p['record'] == ('', '3.4')
problems_row7 = [p for p in problems if p['row'] == 7]
assert len(problems_row7) == 1
p = problems_row7[0] # convenience variable
assert p['column'] == 2
assert p['field'] == 'bar'
assert p['code'] == VALUE_CHECK_FAILED
assert p['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p['value'] == ''
assert p['record'] == ('12', '')
problems_row8 = [p for p in problems if p['row'] == 8]
assert len(problems_row8) == 2 # expect both problems are found
p0 = problems_row8[0] # convenience variable
assert p0['column'] == 1
assert p0['field'] == 'foo'
assert p0['code'] == VALUE_CHECK_FAILED
assert p0['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p0['value'] == 'abc'
assert p0['record'] == ('abc', 'def')
p1 = problems_row8[1] # convenience variable
assert p1['column'] == 2
assert p1['field'] == 'bar'
assert p1['code'] == VALUE_CHECK_FAILED
assert p1['message'] == MESSAGES[VALUE_CHECK_FAILED]
assert p1['value'] == 'def'
assert p1['record'] == ('abc', 'def')
def test_header_check():
"""Test the header checks work."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_header_check() # use default code and message
validator.add_header_check(code='X1', message='custom message') # provide custom code and message
data = (
('foo', 'baz'),
('123', '456')
)
problems = validator.validate(data)
assert len(problems) == 2
p0 = problems[0]
assert p0['code'] == HEADER_CHECK_FAILED
assert p0['message'] == MESSAGES[HEADER_CHECK_FAILED]
assert p0['record'] == ('foo', 'baz')
assert p0['missing'] == set(['bar'])
assert p0['unexpected'] == set(['baz'])
assert p0['row'] == 1
p1 = problems[1]
assert p1['code'] == 'X1'
assert p1['message'] == 'custom message'
assert p1['missing'] == set(['bar'])
assert p1['unexpected'] == set(['baz'])
assert p1['record'] == ('foo', 'baz')
assert p1['row'] == 1
def test_ignore_lines():
"""Test instructions to ignore lines works."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_header_check()
validator.add_value_check('foo', int)
validator.add_value_check('bar', float)
data = (
('ignore', 'me', 'please'),
('ignore', 'me', 'too', 'please'),
('foo', 'baz'),
('1.2', 'abc')
)
problems = validator.validate(data, ignore_lines=2)
assert len(problems) == 3
header_problems = [p for p in problems if p['code'] == HEADER_CHECK_FAILED]
assert len(header_problems) == 1
assert header_problems[0]['row'] == 3
value_problems = [p for p in problems if p['code'] == VALUE_CHECK_FAILED]
assert len(value_problems) == 2
for p in value_problems:
assert p['row'] == 4
def test_record_length_checks():
"""Test the record length checks."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_record_length_check() # test default code and message
validator.add_record_length_check('X2', 'custom message')
data = (
('foo', 'bar'),
('12', '3.4'),
('12',), # be careful with syntax for singleton tuples
('12', '3.4', 'spong')
)
problems = validator.validate(data)
assert len(problems) == 4, len(problems)
# find problems reported under default code
default_problems = [p for p in problems if p['code'] == RECORD_LENGTH_CHECK_FAILED]
assert len(default_problems) == 2
d0 = default_problems[0]
assert d0['message'] == MESSAGES[RECORD_LENGTH_CHECK_FAILED]
assert d0['row'] == 3
assert d0['record'] == ('12',)
assert d0['length'] == 1
d1 = default_problems[1]
assert d1['message'] == MESSAGES[RECORD_LENGTH_CHECK_FAILED]
assert d1['row'] == 4
assert d1['record'] == ('12', '3.4', 'spong')
assert d1['length'] == 3
# find problems reported under custom code
custom_problems = [p for p in problems if p['code'] == 'X2']
assert len(custom_problems) == 2
c0 = custom_problems[0]
assert c0['message'] == 'custom message'
assert c0['row'] == 3
assert c0['record'] == ('12',)
assert c0['length'] == 1
c1 = custom_problems[1]
assert c1['message'] == 'custom message'
assert c1['row'] == 4
assert c1['record'] == ('12', '3.4', 'spong')
assert c1['length'] == 3
def test_value_checks_with_missing_values():
"""
Establish expected behaviour for value checks where there are missing values
in the records.
"""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_value_check('bar', float)
data = (
('foo', 'bar'),
('12',) # this is missing value for bar, what happens to value check?
)
problems = validator.validate(data)
# missing values are ignored - use record length checks to find these
assert len(problems) == 0
def test_value_check_enumeration():
"""Test value checks with the enumeration() function."""
field_names = ('foo', 'bar', 'baz')
validator = CSVValidator(field_names)
# define an enumeration directly with arguments
validator.add_value_check('bar', enumeration('M', 'F'))
# define an enumeration by passing in a list or tuple
flavours = ('chocolate', 'vanilla', 'strawberry')
validator.add_value_check('baz', enumeration(flavours))
data = (
('foo', 'bar', 'baz'),
('1', 'M', 'chocolate'),
('2', 'F', 'maple pecan'),
('3', 'X', 'strawberry')
)
problems = validator.validate(data)
assert len(problems) == 2
p0 = problems[0]
assert p0['code'] == VALUE_CHECK_FAILED
assert p0['row'] == 3
assert p0['column'] == 3
assert p0['field'] == 'baz'
assert p0['value'] == 'maple pecan'
assert p0['record'] == ('2', 'F', 'maple pecan')
p1 = problems[1]
assert p1['code'] == VALUE_CHECK_FAILED
assert p1['row'] == 4
assert p1['column'] == 2
assert p1['field'] == 'bar'
assert p1['value'] == 'X'
assert p1['record'] == ('3', 'X', 'strawberry')
def test_value_check_match_pattern():
"""Test value checks with the match_pattern() function."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
    validator.add_value_check('bar', match_pattern(r'\d{4}-\d{2}-\d{2}'))
data = (
('foo', 'bar'),
('1', '1999-01-01'),
('2', 'abcd-ef-gh'),
('3', 'a1999-01-01'),
('4', '1999-01-01a') # this is valid - pattern attempts to match at beginning of line
)
problems = validator.validate(data)
assert len(problems) == 2, len(problems)
for p in problems:
assert p['code'] == VALUE_CHECK_FAILED
assert problems[0]['row'] == 3
assert problems[1]['row'] == 4
def test_value_check_search_pattern():
"""Test value checks with the search_pattern() function."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
    validator.add_value_check('bar', search_pattern(r'\d{4}-\d{2}-\d{2}'))
data = (
('foo', 'bar'),
('1', '1999-01-01'),
('2', 'abcd-ef-gh'),
('3', 'a1999-01-01'), # this is valid - pattern attempts to match anywhere in line
('4', '1999-01-01a') # this is valid - pattern attempts to match anywhere in line
)
problems = validator.validate(data)
assert len(problems) == 1, len(problems)
assert problems[0]['code'] == VALUE_CHECK_FAILED
assert problems[0]['row'] == 3
def test_value_check_numeric_ranges():
"""Test value checks with numerical range functions."""
field_names = ('foo', 'bar', 'baz', 'quux')
validator = CSVValidator(field_names)
validator.add_value_check('foo', number_range_inclusive(2, 6, int))
validator.add_value_check('bar', number_range_exclusive(2, 6, int))
validator.add_value_check('baz', number_range_inclusive(2.0, 6.3, float))
validator.add_value_check('quux', number_range_exclusive(2.0, 6.3, float))
data = (
('foo', 'bar', 'baz', 'quux'),
('2', '3', '2.0', '2.1'), # valid
('1', '3', '2.0', '2.1'), # foo invalid
('2', '2', '2.0', '2.1'), # bar invalid
('2', '3', '1.9', '2.1'), # baz invalid
('2', '3', '2.0', '2.0') # quux invalid
)
problems = validator.validate(data)
assert len(problems) == 4, len(problems)
for p in problems:
assert p['code'] == VALUE_CHECK_FAILED
assert problems[0]['row'] == 3 and problems[0]['field'] == 'foo'
assert problems[1]['row'] == 4 and problems[1]['field'] == 'bar'
assert problems[2]['row'] == 5 and problems[2]['field'] == 'baz'
assert problems[3]['row'] == 6 and problems[3]['field'] == 'quux'
def test_value_checks_datetime():
"""Test value checks with datetimes."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_value_check('bar', datetime_string('%Y-%m-%d'))
data = (
('foo', 'bar'),
('A', '1999-09-09'), # valid
('B', '1999-13-09'), # invalid month
('C', '1999-09-32'), # invalid day
('D', '1999-09-09ss') # invalid string
)
problems = validator.validate(data)
assert len(problems) == 3, problems
for p in problems:
assert p['code'] == VALUE_CHECK_FAILED
assert problems[0]['row'] == 3 and problems[0]['field'] == 'bar'
assert problems[1]['row'] == 4 and problems[1]['field'] == 'bar'
assert problems[2]['row'] == 5 and problems[2]['field'] == 'bar'
def test_value_checks_datetime_range():
"""Test value checks with datetime ranges."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_value_check('bar', datetime_range_inclusive('1999-09-09',
'2009-09-09',
'%Y-%m-%d'))
validator.add_value_check('bar', datetime_range_exclusive('1999-09-09',
'2009-09-09',
'%Y-%m-%d'))
data = (
('foo', 'bar'),
('A', '1999-09-10'), # valid
('B', '1999-09-09'), # invalid (exclusive)
('C', '2009-09-09'), # invalid (exclusive)
('D', '1999-09-08'), # invalid (both)
('E', '2009-09-10') # invalid (both)
)
problems = validator.validate(data)
assert len(problems) == 6, len(problems)
assert len([p for p in problems if p['row'] == 3]) == 1
assert len([p for p in problems if p['row'] == 4]) == 1
assert len([p for p in problems if p['row'] == 5]) == 2
assert len([p for p in problems if p['row'] == 6]) == 2
def test_value_predicates():
"""Test the use of value predicates."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
foo_predicate = lambda v: math.pow(float(v), 2) < 64
validator.add_value_predicate('foo', foo_predicate)
bar_predicate = lambda v: math.sqrt(float(v)) > 8
validator.add_value_predicate('bar', bar_predicate, 'X3', 'custom message')
data = (
('foo', 'bar'),
('4', '81'), # valid
('9', '81'), # foo invalid
('4', '49') # bar invalid
)
problems = validator.validate(data)
assert len(problems) == 2, len(problems)
p0 = problems[0]
assert p0['code'] == VALUE_PREDICATE_FALSE
assert p0['message'] == MESSAGES[VALUE_PREDICATE_FALSE]
assert p0['row'] == 3
assert p0['column'] == 1
assert p0['field'] == 'foo'
assert p0['value'] == '9'
assert p0['record'] == ('9', '81')
p1 = problems[1]
assert p1['code'] == 'X3'
assert p1['message'] == 'custom message'
assert p1['row'] == 4
assert p1['column'] == 2
assert p1['field'] == 'bar'
assert p1['value'] == '49'
assert p1['record'] == ('4', '49')
def test_record_checks():
"""Test the use of record checks."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
def foo_gt_bar(r):
foo = int(r['foo'])
bar = int(r['bar'])
if foo < bar:
raise RecordError
validator.add_record_check(foo_gt_bar) # use default code and message
def foo_gt_2bar(r):
foo = int(r['foo'])
bar = int(r['bar'])
if foo < 2 * bar:
raise RecordError('X4', 'custom message')
validator.add_record_check(foo_gt_2bar)
data = (
('foo', 'bar'),
('7', '3'), # valid
('5', '3'), # invalid - not foo_gt_2bar
('1', '3') # invalid - both predicates false
)
problems = validator.validate(data)
n = len(problems)
assert n == 3, n
row3_problems = [p for p in problems if p['row'] == 3]
assert len(row3_problems) == 1
p = row3_problems[0]
assert p['code'] == 'X4'
assert p['message'] == 'custom message'
assert p['record'] == ('5', '3')
row4_problems = [p for p in problems if p['row'] == 4]
assert len(row4_problems) == 2
row4_problems_default = [p for p in row4_problems if p['code'] == RECORD_CHECK_FAILED]
assert len(row4_problems_default) == 1
p = row4_problems_default[0]
assert p['message'] == MESSAGES[RECORD_CHECK_FAILED]
assert p['record'] == ('1', '3')
row4_problems_custom = [p for p in row4_problems if p['code'] == 'X4']
assert len(row4_problems_custom) == 1
p = row4_problems_custom[0]
assert p['message'] == 'custom message'
assert p['record'] == ('1', '3')
def test_record_predicates():
"""Test the use of record predicates."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
def foo_gt_bar(r):
return int(r['foo']) > int(r['bar']) # expect record will be a dictionary
validator.add_record_predicate(foo_gt_bar) # use default code and message
def foo_gt_2bar(r):
return int(r['foo']) > 2 * int(r['bar'])
validator.add_record_predicate(foo_gt_2bar, 'X4', 'custom message')
data = (
('foo', 'bar'),
('7', '3'), # valid
('5', '3'), # invalid - not foo_gt_2bar
('1', '3') # invalid - both predicates false
)
problems = validator.validate(data)
n = len(problems)
assert n == 3, n
row3_problems = [p for p in problems if p['row'] == 3]
assert len(row3_problems) == 1
p = row3_problems[0]
assert p['code'] == 'X4'
assert p['message'] == 'custom message'
assert p['record'] == ('5', '3')
row4_problems = [p for p in problems if p['row'] == 4]
assert len(row4_problems) == 2
row4_problems_default = [p for p in row4_problems if p['code'] == RECORD_PREDICATE_FALSE]
assert len(row4_problems_default) == 1
p = row4_problems_default[0]
assert p['message'] == MESSAGES[RECORD_PREDICATE_FALSE]
assert p['record'] == ('1', '3')
row4_problems_custom = [p for p in row4_problems if p['code'] == 'X4']
assert len(row4_problems_custom) == 1
p = row4_problems_custom[0]
assert p['message'] == 'custom message'
assert p['record'] == ('1', '3')
def test_unique_checks():
"""Test the uniqueness checks."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_unique_check('foo')
data = (
('foo', 'bar'),
('1', 'A'),
('2', 'B'),
('1', 'C')
)
problems = validator.validate(data)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['code'] == UNIQUE_CHECK_FAILED
assert p['message'] == MESSAGES[UNIQUE_CHECK_FAILED]
assert p['row'] == 4
assert p['key'] == 'foo'
assert p['value'] == '1'
assert p['record'] == ('1', 'C')
def test_unique_checks_with_variable_record_lengths():
"""Test the uniqueness checks still work when record lengths vary."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_unique_check('bar')
data = (
('foo', 'bar'),
('1', 'A'),
        ('2',), # note the trailing comma for a singleton tuple
('3', 'A')
)
problems = validator.validate(data)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['code'] == UNIQUE_CHECK_FAILED
assert p['message'] == MESSAGES[UNIQUE_CHECK_FAILED]
assert p['row'] == 4
assert p['key'] == 'bar'
assert p['value'] == 'A'
assert p['record'] == ('3', 'A')
def test_compound_unique_checks():
"""Test the uniqueness checks on compound keys."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_unique_check(('foo', 'bar'), 'X5', 'custom message')
data = (
('foo', 'bar'),
('1', 'A'),
('2', 'B'),
('1', 'B'),
('2', 'A'),
('1', 'A')
)
problems = validator.validate(data)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['code'] == 'X5'
assert p['message'] == 'custom message'
assert p['row'] == 6
assert p['key'] == ('foo', 'bar')
assert p['value'] == ('1', 'A')
assert p['record'] == ('1', 'A')
def test_compound_unique_checks_with_variable_record_lengths():
"""Test the uniqueness checks on compound keys when record lengths vary."""
field_names = ('something', 'foo', 'bar')
validator = CSVValidator(field_names)
validator.add_unique_check(('foo', 'bar'), 'X5', 'custom message')
data = (
('something', 'foo', 'bar'),
('Z', '1', 'A'),
('Z', '2', 'B'),
        ('Z',), # note the trailing comma for a singleton tuple
('Z', '2', 'A'),
('Z', '1', 'A')
)
problems = validator.validate(data)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['code'] == 'X5'
assert p['message'] == 'custom message'
assert p['row'] == 6
assert p['key'] == ('foo', 'bar')
assert p['value'] == ('1', 'A')
assert p['record'] == ('Z', '1', 'A')
def test_assert_methods():
"""Test use of 'assert' methods."""
# define a custom validator class
class MyValidator(CSVValidator):
def __init__(self, threshold):
field_names = ('foo', 'bar')
super(MyValidator, self).__init__(field_names)
self._threshold = threshold
def assert_foo_plus_bar_gt_threshold(self, r):
assert int(r['foo']) + int(r['bar']) > self._threshold # use default error code and message
def assert_foo_times_bar_gt_threshold(self, r):
assert int(r['foo']) * int(r['bar']) > self._threshold, ('X6', 'custom message')
validator = MyValidator(42)
data = (
('foo', 'bar'),
('33', '10'), # valid
('7', '8'), # invalid (foo + bar less than threshold)
('3', '4'), # invalid (both)
)
problems = validator.validate(data)
n = len(problems)
assert n == 3, n
row3_problems = [p for p in problems if p['row'] == 3]
assert len(row3_problems) == 1
p = row3_problems[0]
assert p['code'] == ASSERT_CHECK_FAILED
assert p['message'] == MESSAGES[ASSERT_CHECK_FAILED]
assert p['record'] == ('7', '8')
row4_problems = [p for p in problems if p['row'] == 4]
assert len(row4_problems) == 2
row4_problems_custom = [p for p in row4_problems if p['code'] == 'X6']
assert len(row4_problems_custom) == 1, row4_problems
p = row4_problems_custom[0]
assert p['message'] == 'custom message'
assert p['record'] == ('3', '4')
row4_problems_default = [p for p in row4_problems if p['code'] == ASSERT_CHECK_FAILED]
assert len(row4_problems_default) == 1
p = row4_problems_default[0]
assert p['message'] == MESSAGES[ASSERT_CHECK_FAILED]
assert p['record'] == ('3', '4')
def test_check_methods():
"""Test use of 'check' methods."""
# define a custom validator class
class MyValidator(CSVValidator):
def __init__(self, threshold):
field_names = ('foo', 'bar')
super(MyValidator, self).__init__(field_names)
self._threshold = threshold
def check_foo_plus_bar_gt_threshold(self, r):
if int(r['foo']) + int(r['bar']) <= self._threshold:
raise RecordError # use default error code and message
def check_foo_times_bar_gt_threshold(self, r):
if int(r['foo']) * int(r['bar']) <= self._threshold:
raise RecordError('X6', 'custom message')
validator = MyValidator(42)
data = (
('foo', 'bar'),
('33', '10'), # valid
('7', '8'), # invalid (foo + bar less than threshold)
('3', '4'), # invalid (both)
)
problems = validator.validate(data)
n = len(problems)
assert n == 3, n
row3_problems = [p for p in problems if p['row'] == 3]
assert len(row3_problems) == 1
p = row3_problems[0]
assert p['code'] == RECORD_CHECK_FAILED
assert p['message'] == MESSAGES[RECORD_CHECK_FAILED]
assert p['record'] == ('7', '8')
row4_problems = [p for p in problems if p['row'] == 4]
assert len(row4_problems) == 2
row4_problems_custom = [p for p in row4_problems if p['code'] == 'X6']
assert len(row4_problems_custom) == 1
p = row4_problems_custom[0]
assert p['message'] == 'custom message'
assert p['record'] == ('3', '4')
row4_problems_default = [p for p in row4_problems if p['code'] == RECORD_CHECK_FAILED]
assert len(row4_problems_default) == 1
p = row4_problems_default[0]
assert p['message'] == MESSAGES[RECORD_CHECK_FAILED]
assert p['record'] == ('3', '4')
def test_each_and_finally_assert_methods():
"""Test 'each' and 'finally_assert' methods."""
# define a custom validator class
class MyValidator(CSVValidator):
def __init__(self, threshold):
field_names = ('foo', 'bar')
super(MyValidator, self).__init__(field_names)
self._threshold = threshold
self._bars = []
self._count = 0
def each_store_bar(self, r):
n = float(r['bar'])
self._bars.append(n)
self._count += 1
def finally_assert_mean_bar_gt_threshold(self):
mean = sum(self._bars) / self._count
assert mean > self._threshold, ('X7', 'custom message')
data = [
['foo', 'bar'],
['A', '2'],
['B', '3'],
['C', '7']
]
validator = MyValidator(5.0)
problems = validator.validate(data)
assert len(problems) == 1
p = problems[0]
assert p['code'] == 'X7'
assert p['message'] == 'custom message'
data.append(['D', '10'])
validator = MyValidator(5.0)
problems = validator.validate(data)
assert len(problems) == 0
def test_exception_handling():
"""Establish expectations for exception handling."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_value_check('foo', int)
def buggy_value_check(v):
"""I am a buggy value check."""
raise Exception('something went wrong')
validator.add_value_check('bar', buggy_value_check)
def buggy_value_predicate(v):
"""I am a buggy value predicate."""
raise Exception('something went wrong')
validator.add_value_predicate('bar', buggy_value_predicate)
def buggy_record_check(r):
"""I am a buggy record check."""
raise Exception('something went wrong')
validator.add_record_check(buggy_record_check)
def buggy_record_predicate(r):
"""I am a buggy record predicate."""
raise Exception('something went wrong')
validator.add_record_predicate(buggy_record_predicate)
def buggy_assert(r):
"""I am a buggy assert."""
raise Exception('something went wrong')
validator.assert_something_buggy = buggy_assert
def buggy_check(r):
"""I am a buggy check."""
raise Exception('something went wrong')
validator.check_something_buggy = buggy_check
def buggy_each(r):
"""I am a buggy each."""
raise Exception('something went wrong')
validator.each_something_buggy = buggy_each
def buggy_finally_assert():
"""I am a buggy finally assert."""
raise Exception('something went wrong')
validator.finally_assert_something_buggy = buggy_finally_assert
def buggy_skip(record):
"""I am a buggy skip."""
raise Exception('something went wrong')
validator.add_skip(buggy_skip)
data = (
('foo', 'bar'),
('ab', '56')
)
problems = validator.validate(data, report_unexpected_exceptions=False)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['row'] == 2
problems = validator.validate(data) # by default, exceptions are reported as problems
n = len(problems)
assert n == 10, n
unexpected_problems = [p for p in problems if p['code'] == UNEXPECTED_EXCEPTION]
assert len(unexpected_problems) == 9
for p in unexpected_problems:
e = p['exception']
assert e.args[0] == 'something went wrong', e.args
def test_summarize():
"""Test use of summarize option."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
def foo_gt_bar(r):
return int(r['foo']) > int(r['bar'])
validator.add_record_predicate(foo_gt_bar)
data = (
('foo', 'bar'),
('7', '3'), # valid
('1', '3') # invalid
)
problems = validator.validate(data, summarize=True)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['code'] == RECORD_PREDICATE_FALSE
for k in ('message', 'row', 'record'):
assert k not in p
def test_limit():
"""Test the use of the limit option."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
def foo_gt_bar(r):
return int(r['foo']) > int(r['bar'])
validator.add_record_predicate(foo_gt_bar)
data = (
('foo', 'bar'),
('7', '3'), # valid
('1', '3'), # invalid
('2', '3') # invalid
)
problems = validator.validate(data, limit=1)
n = len(problems)
assert n == 1, n
problems = validator.validate(data)
n = len(problems)
assert n == 2, n
def test_context():
"""Test passing in of context information."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
def foo_gt_bar(r):
return int(r['foo']) > int(r['bar'])
validator.add_record_predicate(foo_gt_bar)
data = (
('foo', 'bar'),
('7', '3'), # valid
('1', '3') # invalid
)
context = {'info': 'file X'}
problems = validator.validate(data, context=context)
n = len(problems)
assert n == 1, n
p = problems[0]
assert p['context'] == context
def test_write_problems():
"""Test writing problems as restructured text."""
class MockFile(object):
def __init__(self):
self.content = ''
def write(self, s):
self.content += s
file = MockFile()
problems = [
{
'code': 'X1',
'message': 'invalid foo',
'row': 2,
'field': 'foo',
'context': {
'info': 'interesting'
}
},
{
'code': 'X2',
'message': 'invalid bar',
'row': 3,
'field': 'bar',
'context': {
'info': 'very interesting'
}
}
]
expectation = """
=================
Validation Report
=================
Problems
========
X1 - invalid foo
----------------
:field: foo
:row: 2
:info: interesting
X2 - invalid bar
----------------
:field: bar
:row: 3
:info: very interesting
Summary
=======
Found 2 problems in total.
:X1: 1
:X2: 1
"""
write_problems(problems, file)
assert file.content == expectation, file.content
def test_write_problems_summarize():
"""Test writing a problem summary as restructured text."""
class MockFile(object):
def __init__(self):
self.content = ''
def write(self, s):
self.content += s
file = MockFile()
problems = [
{
'code': 'X1',
'message': 'invalid foo',
'row': 2,
'field': 'foo',
'context': {
'info': 'interesting'
}
},
{
'code': 'X2',
'message': 'invalid bar',
'row': 3,
'field': 'bar',
'context': {
'info': 'very interesting'
}
},
{
'code': 'X2',
'message': 'invalid bar',
'row': 4,
'field': 'bar',
'context': {
'info': 'very very interesting'
}
}
]
expectation = """
=================
Validation Report
=================
Summary
=======
Found 3 problems in total.
:X1: 1
:X2: 2
"""
write_problems(problems, file, summarize=True)
assert file.content == expectation, file.content
def test_write_problems_with_limit():
"""Test writing problems with a limit as restructured text."""
class MockFile(object):
def __init__(self):
self.content = ''
def write(self, s):
self.content += s
file = MockFile()
problems = [
{
'code': 'X1',
'message': 'invalid foo',
'row': 2,
'field': 'foo',
'context': {
'info': 'interesting'
}
},
{
'code': 'X2',
'message': 'invalid bar',
'row': 3,
'field': 'bar',
'context': {
'info': 'very interesting'
}
}
]
expectation = """
=================
Validation Report
=================
Problems
========
X1 - invalid foo
----------------
:field: foo
:row: 2
:info: interesting
Summary
=======
Found at least 1 problem in total.
:X1: 1
"""
write_problems(problems, file, limit=1)
assert file.content == expectation, file.content
def test_skips():
"""Test skip functions."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
validator.add_record_length_check()
validator.add_value_check('foo', int)
def skip_pragma(record):
return record[0].startswith('##')
validator.add_skip(skip_pragma)
data = (
('foo', 'bar'),
('1', 'X'),
('## this row', 'should be', 'skipped'),
('3', 'Y')
)
problems = validator.validate(data)
assert len(problems) == 0, problems
def test_guard_conditions():
"""Test some guard conditions."""
field_names = ('foo', 'bar')
validator = CSVValidator(field_names)
try:
validator.add_value_check('foo', 'i am not callable')
except AssertionError:
pass # expected
else:
assert False, 'expected exception'
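# End-to-end usage sketch (illustrative; 'data.csv' and its contents are
# hypothetical): validate() accepts any iterable of records, so a csv.reader
# can be passed in directly and problems written out as restructured text:
#
#     import csv, sys
#     validator = CSVValidator(('foo', 'bar'))
#     validator.add_header_check()
#     validator.add_value_check('foo', int)
#     with open('data.csv', 'rb') as f:
#         problems = validator.validate(csv.reader(f))
#     write_problems(problems, sys.stdout)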
|
termie/jaikuengine | refs/heads/master | common/protocol/base.py | 34 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common import exception
class Connection(object):
pass
class Service(object):
connection = None
handlers = None
_handlers = None
def __init__(self, connection):
self.connection = connection
self._handlers = []
def init_handlers(self):
if not self.handlers:
return
for handler_class in self.handlers:
self._handlers.append(handler_class(self))
def handle_message(self, sender, target, message):
matched = None
handler = None
for h in self._handlers:
matched = h.match(sender, message)
if matched:
handler = h
break
if not matched:
rv = self.unknown(sender, message)
return self.response_ok(rv)
try:
rv = handler.handle(sender, matched, message)
return self.response_ok(rv)
except exception.UserVisibleError, e:
exception.log_exception()
self.send_message([sender], str(e))
return self.response_error(e)
except exception.Error:
exception.log_exception()
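# Illustrative handler sketch (hypothetical, not part of this module): the
# dispatch loop in handle_message() implies each handler exposes
# match(sender, message) and handle(sender, matched, message), and is
# constructed with the owning service (see init_handlers()).
#
#   class EchoHandler(object):
#     def __init__(self, service):
#       self.service = service
#     def match(self, sender, message):
#       return message if message.startswith('echo ') else None
#     def handle(self, sender, matched, message):
#       self.service.send_message([sender], matched[len('echo '):])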
|
gangadharkadam/johnerp | refs/heads/develop | erpnext/accounts/doctype/purchase_invoice/test_purchase_invoice.py | 7 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
import frappe.model
import json
from frappe.utils import cint
import frappe.defaults
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import set_perpetual_inventory, \
test_records as pr_test_records
test_dependencies = ["Item", "Cost Center"]
test_ignore = ["Serial No"]
class TestPurchaseInvoice(unittest.TestCase):
def test_gl_entries_without_auto_accounting_for_stock(self):
set_perpetual_inventory(0)
self.assertTrue(not cint(frappe.defaults.get_global_default("auto_accounting_for_stock")))
wrapper = frappe.copy_doc(test_records[0])
wrapper.insert()
wrapper.submit()
wrapper.load_from_db()
dl = wrapper
expected_gl_entries = {
"_Test Supplier - _TC": [0, 1512.30],
"_Test Account Cost for Goods Sold - _TC": [1250, 0],
"_Test Account Shipping Charges - _TC": [100, 0],
"_Test Account Excise Duty - _TC": [140, 0],
"_Test Account Education Cess - _TC": [2.8, 0],
"_Test Account S&H Education Cess - _TC": [1.4, 0],
"_Test Account CST - _TC": [29.88, 0],
"_Test Account VAT - _TC": [156.25, 0],
"_Test Account Discount - _TC": [0, 168.03],
}
gl_entries = frappe.db.sql("""select account, debit, credit from `tabGL Entry`
where voucher_type = 'Purchase Invoice' and voucher_no = %s""", dl.name, as_dict=1)
for d in gl_entries:
self.assertEqual([d.debit, d.credit], expected_gl_entries.get(d.account))
def test_gl_entries_with_auto_accounting_for_stock(self):
set_perpetual_inventory(1)
self.assertEqual(cint(frappe.defaults.get_global_default("auto_accounting_for_stock")), 1)
pi = frappe.copy_doc(test_records[1])
pi.insert()
pi.submit()
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
expected_values = sorted([
["_Test Supplier - _TC", 0, 720],
["Stock Received But Not Billed - _TC", 750.0, 0],
["Expenses Included In Valuation - _TC", 0.0, 250.0],
["_Test Account Shipping Charges - _TC", 100.0, 0],
["_Test Account VAT - _TC", 120.0, 0],
])
for i, gle in enumerate(gl_entries):
			self.assertEqual(expected_values[i][0], gle.account)
			self.assertEqual(expected_values[i][1], gle.debit)
			self.assertEqual(expected_values[i][2], gle.credit)
set_perpetual_inventory(0)
def test_gl_entries_with_auto_accounting_for_stock_against_pr(self):
set_perpetual_inventory(1)
self.assertEqual(cint(frappe.defaults.get_global_default("auto_accounting_for_stock")), 1)
pr = frappe.copy_doc(pr_test_records[0])
pr.submit()
pi = frappe.copy_doc(test_records[1])
for d in pi.get("entries"):
d.purchase_receipt = pr.name
pi.insert()
pi.submit()
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
expected_values = sorted([
["_Test Supplier - _TC", 0, 720],
["Stock Received But Not Billed - _TC", 500.0, 0],
["_Test Account Shipping Charges - _TC", 100.0, 0],
["_Test Account VAT - _TC", 120.0, 0],
])
for i, gle in enumerate(gl_entries):
			self.assertEqual(expected_values[i][0], gle.account)
			self.assertEqual(expected_values[i][1], gle.debit)
			self.assertEqual(expected_values[i][2], gle.credit)
set_perpetual_inventory(0)
def test_gl_entries_with_aia_for_non_stock_items(self):
set_perpetual_inventory()
self.assertEqual(cint(frappe.defaults.get_global_default("auto_accounting_for_stock")), 1)
pi = frappe.copy_doc(test_records[1])
pi.get("entries")[0].item_code = "_Test Non Stock Item"
pi.get("entries")[0].expense_account = "_Test Account Cost for Goods Sold - _TC"
pi.get("other_charges").pop(0)
pi.get("other_charges").pop(1)
pi.insert()
pi.submit()
gl_entries = frappe.db.sql("""select account, debit, credit
from `tabGL Entry` where voucher_type='Purchase Invoice' and voucher_no=%s
order by account asc""", pi.name, as_dict=1)
self.assertTrue(gl_entries)
expected_values = sorted([
["_Test Supplier - _TC", 0, 620],
["_Test Account Cost for Goods Sold - _TC", 500.0, 0],
["_Test Account VAT - _TC", 120.0, 0],
])
for i, gle in enumerate(gl_entries):
			self.assertEqual(expected_values[i][0], gle.account)
			self.assertEqual(expected_values[i][1], gle.debit)
			self.assertEqual(expected_values[i][2], gle.credit)
set_perpetual_inventory(0)
def test_purchase_invoice_calculation(self):
wrapper = frappe.copy_doc(test_records[0])
wrapper.insert()
wrapper.load_from_db()
expected_values = [
["_Test Item Home Desktop 100", 90, 59],
["_Test Item Home Desktop 200", 135, 177]
]
for i, item in enumerate(wrapper.get("entries")):
self.assertEqual(item.item_code, expected_values[i][0])
self.assertEqual(item.item_tax_amount, expected_values[i][1])
self.assertEqual(item.valuation_rate, expected_values[i][2])
self.assertEqual(wrapper.net_total, 1250)
# tax amounts
expected_values = [
["_Test Account Shipping Charges - _TC", 100, 1350],
["_Test Account Customs Duty - _TC", 125, 1350],
["_Test Account Excise Duty - _TC", 140, 1490],
["_Test Account Education Cess - _TC", 2.8, 1492.8],
["_Test Account S&H Education Cess - _TC", 1.4, 1494.2],
["_Test Account CST - _TC", 29.88, 1524.08],
["_Test Account VAT - _TC", 156.25, 1680.33],
["_Test Account Discount - _TC", 168.03, 1512.30],
]
for i, tax in enumerate(wrapper.get("other_charges")):
self.assertEqual(tax.account_head, expected_values[i][0])
self.assertEqual(tax.tax_amount, expected_values[i][1])
self.assertEqual(tax.total, expected_values[i][2])
def test_purchase_invoice_with_subcontracted_item(self):
wrapper = frappe.copy_doc(test_records[0])
wrapper.get("entries")[0].item_code = "_Test FG Item"
wrapper.insert()
wrapper.load_from_db()
expected_values = [
["_Test FG Item", 90, 59],
["_Test Item Home Desktop 200", 135, 177]
]
for i, item in enumerate(wrapper.get("entries")):
self.assertEqual(item.item_code, expected_values[i][0])
self.assertEqual(item.item_tax_amount, expected_values[i][1])
self.assertEqual(item.valuation_rate, expected_values[i][2])
self.assertEqual(wrapper.net_total, 1250)
# tax amounts
expected_values = [
["_Test Account Shipping Charges - _TC", 100, 1350],
["_Test Account Customs Duty - _TC", 125, 1350],
["_Test Account Excise Duty - _TC", 140, 1490],
["_Test Account Education Cess - _TC", 2.8, 1492.8],
["_Test Account S&H Education Cess - _TC", 1.4, 1494.2],
["_Test Account CST - _TC", 29.88, 1524.08],
["_Test Account VAT - _TC", 156.25, 1680.33],
["_Test Account Discount - _TC", 168.03, 1512.30],
]
for i, tax in enumerate(wrapper.get("other_charges")):
self.assertEqual(tax.account_head, expected_values[i][0])
self.assertEqual(tax.tax_amount, expected_values[i][1])
self.assertEqual(tax.total, expected_values[i][2])
def test_purchase_invoice_with_advance(self):
from erpnext.accounts.doctype.journal_voucher.test_journal_voucher \
import test_records as jv_test_records
jv = frappe.copy_doc(jv_test_records[1])
jv.insert()
jv.submit()
pi = frappe.copy_doc(test_records[0])
pi.append("advance_allocation_details", {
"journal_voucher": jv.name,
"jv_detail_no": jv.get("entries")[0].name,
"advance_amount": 400,
"allocated_amount": 300,
"remarks": jv.remark
})
pi.insert()
pi.submit()
pi.load_from_db()
self.assertTrue(frappe.db.sql("""select name from `tabJournal Voucher Detail`
where against_voucher=%s""", pi.name))
self.assertTrue(frappe.db.sql("""select name from `tabJournal Voucher Detail`
where against_voucher=%s and debit=300""", pi.name))
self.assertEqual(pi.outstanding_amount, 1212.30)
pi.cancel()
self.assertTrue(not frappe.db.sql("""select name from `tabJournal Voucher Detail`
where against_voucher=%s""", pi.name))
test_records = frappe.get_test_records('Purchase Invoice')
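# These cases extend unittest.TestCase, so with a configured Frappe/ERPNext
# site they can be collected by any unittest-compatible runner (the exact
# invocation depends on the bench setup and is an assumption here), e.g.:
#
#     python -m unittest erpnext.accounts.doctype.purchase_invoice.test_purchase_invoice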
|
hwjworld/xiaodun-platform | refs/heads/master | common/djangoapps/user_api/__init__.py | 12133432 | |
levilucio/SyVOLT | refs/heads/master | GM2AUTOSAR_MM/cardinality_resolution/__init__.py | 12133432 | |
kave/cfgov-refresh | refs/heads/flapjack | cfgov/v1/wagtail_hooks.py | 1 | import os
import json
from urlparse import urlsplit
from django.http import Http404
from django.contrib.auth.models import Permission
from django.utils.html import escape
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore import hooks
from v1.models import CFGOVPage
@hooks.register('after_create_page')
@hooks.register('after_edit_page')
def share_the_page(request, page):
page = page.specific
parent_page = page.parent()
parent_page_perms = parent_page.permissions_for_user(request.user)
if parent_page.slug != 'root':
is_publishing = bool(request.POST.get('action-publish')) and parent_page_perms.can_publish()
is_sharing = bool(request.POST.get('action-share')) and parent_page_perms.can_publish()
else: # Giving Root page permissions to publish/share
is_publishing = bool(request.POST.get('action-publish'))
is_sharing = bool(request.POST.get('action-share'))
    # If the page is being shared or published, mark it as shared; otherwise
    # clear the flag. Either way, save the page.
if is_sharing or is_publishing:
page.shared = True
else:
page.shared = False
page.save()
# If the page isn't being published but the page is live and the editor
# wants to share updated content that doesn't show on the production site,
# we must set the page.live to True, delete the latest revision, and save
# a new revision with `live` = False. This doesn't affect the page's published
# status, as the saved page object in the database still has `live` equal to
    # True and we're never committing the change. As seen in CFGOVPage's route
# method, `route()` will select the latest revision of the page where `live`
# is set to True and return that revision as a page object to serve the request with.
if not is_publishing:
page.live = False
latest = page.get_latest_revision()
content_json = json.loads(latest.content_json)
content_json['live'], content_json['shared'] = page.live, page.shared
latest.content_json = json.dumps(content_json)
latest.save()
if is_publishing:
latest.publish()
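# In short (illustrative summary of the branches above): "action-publish"
# publishes the latest revision as usual; "action-share" leaves the live page
# untouched but flags the draft as shared so the staging-site check below
# will serve it; a plain save clears the shared flag.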
@hooks.register('before_serve_page')
def check_request_site(page, request, serve_args, serve_kwargs):
if request.site.hostname == os.environ.get('STAGING_HOSTNAME'):
if isinstance(page, CFGOVPage):
if not page.shared:
raise Http404
@hooks.register('register_permissions')
def register_share_permissions():
return Permission.objects.filter(codename='share_page')
class CFGovLinkHandler(object):
"""
CFGovLinkHandler will be invoked whenever we encounter an <a> element in HTML content
with an attribute of data-linktype="page". The resulting element in the database
representation will be:
<a linktype="page" id="42">hello world</a>
"""
@staticmethod
def get_db_attributes(tag):
"""
Given an <a> tag that we've identified as a page link embed (because it has a
data-linktype="page" attribute), return a dict of the attributes we should
have on the resulting <a linktype="page"> element.
"""
return {'id': tag['data-id']}
@staticmethod
def expand_db_attributes(attrs, for_editor):
try:
page = Page.objects.get(id=attrs['id'])
if for_editor:
editor_attrs = 'data-linktype="page" data-id="%d" ' % page.id
else:
editor_attrs = ''
return '<a %shref="%s">' % (editor_attrs, escape(urlsplit(page.url).path))
except Page.DoesNotExist:
return "<a>"
@hooks.register('register_rich_text_link_handler')
def register_cfgov_link_handler():
return ('page', CFGovLinkHandler)
|
googleapis/googleapis-gen | refs/heads/master | google/logging/v2/logging-v2-py/google/cloud/logging_v2/types/logging_config.py | 2 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.logging.v2',
manifest={
'LifecycleState',
'LogBucket',
'LogView',
'LogSink',
'BigQueryOptions',
'ListBucketsRequest',
'ListBucketsResponse',
'CreateBucketRequest',
'UpdateBucketRequest',
'GetBucketRequest',
'DeleteBucketRequest',
'UndeleteBucketRequest',
'ListViewsRequest',
'ListViewsResponse',
'CreateViewRequest',
'UpdateViewRequest',
'GetViewRequest',
'DeleteViewRequest',
'ListSinksRequest',
'ListSinksResponse',
'GetSinkRequest',
'CreateSinkRequest',
'UpdateSinkRequest',
'DeleteSinkRequest',
'LogExclusion',
'ListExclusionsRequest',
'ListExclusionsResponse',
'GetExclusionRequest',
'CreateExclusionRequest',
'UpdateExclusionRequest',
'DeleteExclusionRequest',
'GetCmekSettingsRequest',
'UpdateCmekSettingsRequest',
'CmekSettings',
},
)
class LifecycleState(proto.Enum):
r"""LogBucket lifecycle states."""
LIFECYCLE_STATE_UNSPECIFIED = 0
ACTIVE = 1
DELETE_REQUESTED = 2
class LogBucket(proto.Message):
r"""Describes a repository of logs.
Attributes:
name (str):
The resource name of the bucket. For example:
"projects/my-project-id/locations/my-location/buckets/my-bucket-id
The supported locations are: "global"
For the location of ``global`` it is unspecified where logs
are actually stored. Once a bucket has been created, the
location can not be changed.
description (str):
Describes this bucket.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The creation timestamp of the
bucket. This is not set for any of the default
buckets.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The last update timestamp of the
bucket.
retention_days (int):
Logs will be retained by default for this
amount of time, after which they will
automatically be deleted. The minimum retention
period is 1 day. If this value is set to zero at
bucket creation time, the default time of 30
days will be used.
locked (bool):
Whether the bucket has been locked.
The retention period on a locked bucket may not
be changed. Locked buckets may only be deleted
if they are empty.
lifecycle_state (google.cloud.logging_v2.types.LifecycleState):
Output only. The bucket lifecycle state.
"""
name = proto.Field(
proto.STRING,
number=1,
)
description = proto.Field(
proto.STRING,
number=3,
)
create_time = proto.Field(
proto.MESSAGE,
number=4,
message=timestamp_pb2.Timestamp,
)
update_time = proto.Field(
proto.MESSAGE,
number=5,
message=timestamp_pb2.Timestamp,
)
retention_days = proto.Field(
proto.INT32,
number=11,
)
locked = proto.Field(
proto.BOOL,
number=9,
)
lifecycle_state = proto.Field(
proto.ENUM,
number=12,
enum='LifecycleState',
)
class LogView(proto.Message):
r"""Describes a view over logs in a bucket.
Attributes:
name (str):
The resource name of the view.
            For example:
            "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view"
description (str):
Describes this view.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The creation timestamp of the
view.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The last update timestamp of the
view.
filter (str):
Filter that restricts which log entries in a bucket are
visible in this view. Filters are restricted to be a logical
AND of ==/!= of any of the following: originating
project/folder/organization/billing account. resource type
log id Example: SOURCE("projects/myproject") AND
resource.type = "gce_instance" AND LOG_ID("stdout")
"""
name = proto.Field(
proto.STRING,
number=1,
)
description = proto.Field(
proto.STRING,
number=3,
)
create_time = proto.Field(
proto.MESSAGE,
number=4,
message=timestamp_pb2.Timestamp,
)
update_time = proto.Field(
proto.MESSAGE,
number=5,
message=timestamp_pb2.Timestamp,
)
filter = proto.Field(
proto.STRING,
number=7,
)
class LogSink(proto.Message):
r"""Describes a sink used to export log entries to one of the
following destinations in any project: a Cloud Storage bucket, a
BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter
controls which log entries are exported. The sink must be
created within a project, organization, billing account, or
folder.
Attributes:
name (str):
Required. The client-assigned sink identifier, unique within
the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink
identifiers are limited to 100 characters and can include
only the following characters: upper and lower-case
alphanumeric characters, underscores, hyphens, and periods.
First character has to be alphanumeric.
destination (str):
Required. The export destination:
::
"storage.googleapis.com/[GCS_BUCKET]"
"bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]"
"pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]"
The sink's ``writer_identity``, set when the sink is
created, must have permission to write to the destination or
else the log entries are not exported. For more information,
see `Exporting Logs with
Sinks <https://cloud.google.com/logging/docs/api/tasks/exporting-logs>`__.
filter (str):
Optional. An `advanced logs
filter <https://cloud.google.com/logging/docs/view/advanced-queries>`__.
The only exported log entries are those that are in the
resource owning the sink and that match the filter. For
example:
::
logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR
description (str):
Optional. A description of this sink.
The maximum length of the description is 8000
characters.
disabled (bool):
Optional. If set to True, then this sink is
disabled and it does not export any log entries.
exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]):
Optional. Log entries that match any of the exclusion
filters will not be exported. If a log entry is matched by
both ``filter`` and one of ``exclusion_filters`` it will not
be exported.
output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat):
Deprecated. This field is unused.
writer_identity (str):
Output only. An IAM identity—a service account or
group—under which Logging writes the exported log entries to
the sink's destination. This field is set by
[sinks.create][google.logging.v2.ConfigServiceV2.CreateSink]
and
[sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink]
based on the value of ``unique_writer_identity`` in those
methods.
Until you grant this identity write-access to the
destination, log entry exports from this sink will fail. For
more information, see `Granting Access for a
Resource <https://cloud.google.com/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource>`__.
Consult the destination service's documentation to determine
the appropriate IAM roles to assign to the identity.
include_children (bool):
Optional. This field applies only to sinks owned by
organizations and folders. If the field is false, the
default, only the logs owned by the sink's parent resource
are available for export. If the field is true, then logs
from all the projects, folders, and billing accounts
contained in the sink's parent resource are also available
for export. Whether a particular log entry from the children
is exported depends on the sink's filter expression. For
example, if this field is true, then the filter
``resource.type=gce_instance`` would export all Compute
Engine VM instance log entries from all projects in the
sink's parent. To only export entries from certain child
projects, filter on the project part of the log name:
::
logName:("projects/test-project1/" OR "projects/test-project2/") AND
resource.type=gce_instance
bigquery_options (google.cloud.logging_v2.types.BigQueryOptions):
Optional. Options that affect sinks exporting
data to BigQuery.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The creation timestamp of the
sink.
This field may not be present for older sinks.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The last update timestamp of the
sink.
This field may not be present for older sinks.
"""
class VersionFormat(proto.Enum):
r"""Deprecated. This is unused."""
VERSION_FORMAT_UNSPECIFIED = 0
V2 = 1
V1 = 2
name = proto.Field(
proto.STRING,
number=1,
)
destination = proto.Field(
proto.STRING,
number=3,
)
filter = proto.Field(
proto.STRING,
number=5,
)
description = proto.Field(
proto.STRING,
number=18,
)
disabled = proto.Field(
proto.BOOL,
number=19,
)
exclusions = proto.RepeatedField(
proto.MESSAGE,
number=16,
message='LogExclusion',
)
output_version_format = proto.Field(
proto.ENUM,
number=6,
enum=VersionFormat,
)
writer_identity = proto.Field(
proto.STRING,
number=8,
)
include_children = proto.Field(
proto.BOOL,
number=9,
)
bigquery_options = proto.Field(
proto.MESSAGE,
number=12,
oneof='options',
message='BigQueryOptions',
)
create_time = proto.Field(
proto.MESSAGE,
number=13,
message=timestamp_pb2.Timestamp,
)
update_time = proto.Field(
proto.MESSAGE,
number=14,
message=timestamp_pb2.Timestamp,
)
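# Construction sketch (illustrative; the resource names are placeholders):
# like all proto-plus messages in this module, LogSink accepts plain keyword
# arguments for its fields, e.g.
#
#     sink = LogSink(
#         name='my-sink',
#         destination='pubsub.googleapis.com/projects/my-project/topics/my-topic',
#         filter='severity>=ERROR',
#     )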
class BigQueryOptions(proto.Message):
r"""Options that change functionality of a sink exporting data to
BigQuery.
Attributes:
use_partitioned_tables (bool):
Optional. Whether to use `BigQuery's partition
tables <https://cloud.google.com/bigquery/docs/partitioned-tables>`__.
By default, Logging creates dated tables based on the log
entries' timestamps, e.g. syslog_20170523. With partitioned
tables the date suffix is no longer present and `special
query
syntax <https://cloud.google.com/bigquery/docs/querying-partitioned-tables>`__
has to be used instead. In both cases, tables are sharded
based on UTC timezone.
uses_timestamp_column_partitioning (bool):
Output only. True if new timestamp column based partitioning
is in use, false if legacy ingestion-time partitioning is in
use. All new sinks will have this field set true and will
use timestamp column based partitioning. If
use_partitioned_tables is false, this value has no meaning
and will be false. Legacy sinks using partitioned tables
will have this field set to false.
"""
use_partitioned_tables = proto.Field(
proto.BOOL,
number=1,
)
uses_timestamp_column_partitioning = proto.Field(
proto.BOOL,
number=3,
)
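# Illustrative sketch (not part of the generated module): opting in to
# timestamp-partitioned BigQuery tables for a sink. The sink name and
# destination below are hypothetical placeholders.
#
#   options = BigQueryOptions(use_partitioned_tables=True)
#   sink = LogSink(
#       name='bq-sink',
#       destination='bigquery.googleapis.com/projects/my-project/datasets/my_dataset',
#       bigquery_options=options,
#   )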
class ListBucketsRequest(proto.Message):
r"""The parameters to ``ListBuckets``.
Attributes:
parent (str):
Required. The parent resource whose buckets are to be
listed:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]"
"organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]"
"folders/[FOLDER_ID]/locations/[LOCATION_ID]"
Note: The locations portion of the resource must be
specified, but supplying the character ``-`` in place of
[LOCATION_ID] will return all buckets.
page_token (str):
Optional. If present, then retrieve the next batch of
results from the preceding call to this method.
``pageToken`` must be the value of ``nextPageToken`` from
the previous response. The values of other method parameters
should be identical to those in the previous call.
page_size (int):
Optional. The maximum number of results to return from this
request. Non-positive values are ignored. The presence of
``nextPageToken`` in the response indicates that more
results might be available.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
page_token = proto.Field(
proto.STRING,
number=2,
)
page_size = proto.Field(
proto.INT32,
number=3,
)
class ListBucketsResponse(proto.Message):
r"""The response from ListBuckets.
Attributes:
buckets (Sequence[google.cloud.logging_v2.types.LogBucket]):
A list of buckets.
next_page_token (str):
If there might be more results than appear in this response,
then ``nextPageToken`` is included. To get the next set of
results, call the same method again using the value of
``nextPageToken`` as ``pageToken``.
"""
@property
def raw_page(self):
return self
buckets = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='LogBucket',
)
next_page_token = proto.Field(
proto.STRING,
number=2,
)
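# Illustrative sketch (not part of the generated module): paging through
# buckets with the request/response pair above, following the documented
# ``pageToken``/``nextPageToken`` contract. ``client`` is a hypothetical
# ConfigServiceV2 client exposing ``list_buckets``.
#
#   request = ListBucketsRequest(parent='projects/my-project/locations/-')
#   while True:
#       response = client.list_buckets(request)
#       for bucket in response.buckets:
#           handle(bucket)  # placeholder for application logic
#       if not response.next_page_token:
#           break
#       request.page_token = response.next_page_token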
class CreateBucketRequest(proto.Message):
r"""The parameters to ``CreateBucket``.
Attributes:
parent (str):
Required. The resource in which to create the bucket:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]"
Example: ``"projects/my-logging-project/locations/global"``
bucket_id (str):
Required. A client-assigned identifier such as
``"my-bucket"``. Identifiers are limited to 100 characters
and can include only letters, digits, underscores, hyphens,
and periods.
bucket (google.cloud.logging_v2.types.LogBucket):
Required. The new bucket. The region
specified in the new bucket must be compliant
with any Location Restriction Org Policy. The
name field in the bucket is ignored.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
bucket_id = proto.Field(
proto.STRING,
number=2,
)
bucket = proto.Field(
proto.MESSAGE,
number=3,
message='LogBucket',
)
class UpdateBucketRequest(proto.Message):
r"""The parameters to ``UpdateBucket``.
Attributes:
name (str):
Required. The full resource name of the bucket to update.
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``.
            Also requires permission
            "resourcemanager.projects.updateLiens" to set the locked
            property.
bucket (google.cloud.logging_v2.types.LogBucket):
Required. The updated bucket.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. Field mask that specifies the fields in ``bucket``
that need an update. A bucket field will be overwritten if,
and only if, it is in the update mask. ``name`` and output
only fields cannot be updated.
For a detailed ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
Example: ``updateMask=retention_days``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
bucket = proto.Field(
proto.MESSAGE,
number=2,
message='LogBucket',
)
update_mask = proto.Field(
proto.MESSAGE,
number=4,
message=field_mask_pb2.FieldMask,
)
class GetBucketRequest(proto.Message):
r"""The parameters to ``GetBucket``.
Attributes:
name (str):
Required. The resource name of the bucket:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class DeleteBucketRequest(proto.Message):
r"""The parameters to ``DeleteBucket``.
Attributes:
name (str):
Required. The full resource name of the bucket to delete.
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class UndeleteBucketRequest(proto.Message):
r"""The parameters to ``UndeleteBucket``.
Attributes:
name (str):
Required. The full resource name of the bucket to undelete.
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
"folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class ListViewsRequest(proto.Message):
r"""The parameters to ``ListViews``.
Attributes:
parent (str):
Required. The bucket whose views are to be listed:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]".
page_token (str):
Optional. If present, then retrieve the next batch of
results from the preceding call to this method.
``pageToken`` must be the value of ``nextPageToken`` from
the previous response. The values of other method parameters
should be identical to those in the previous call.
page_size (int):
Optional. The maximum number of results to return from this
request. Non-positive values are ignored. The presence of
``nextPageToken`` in the response indicates that more
results might be available.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
page_token = proto.Field(
proto.STRING,
number=2,
)
page_size = proto.Field(
proto.INT32,
number=3,
)
class ListViewsResponse(proto.Message):
r"""The response from ListViews.
Attributes:
views (Sequence[google.cloud.logging_v2.types.LogView]):
A list of views.
next_page_token (str):
If there might be more results than appear in this response,
then ``nextPageToken`` is included. To get the next set of
results, call the same method again using the value of
``nextPageToken`` as ``pageToken``.
"""
@property
def raw_page(self):
return self
views = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='LogView',
)
next_page_token = proto.Field(
proto.STRING,
number=2,
)
class CreateViewRequest(proto.Message):
r"""The parameters to ``CreateView``.
Attributes:
parent (str):
Required. The bucket in which to create the view
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
Example:
``"projects/my-logging-project/locations/my-location/buckets/my-bucket"``
view_id (str):
Required. The id to use for this view.
view (google.cloud.logging_v2.types.LogView):
Required. The new view.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
view_id = proto.Field(
proto.STRING,
number=2,
)
view = proto.Field(
proto.MESSAGE,
number=3,
message='LogView',
)
class UpdateViewRequest(proto.Message):
r"""The parameters to ``UpdateView``.
Attributes:
name (str):
Required. The full resource name of the view to update
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
view (google.cloud.logging_v2.types.LogView):
Required. The updated view.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask that specifies the fields in ``view``
that need an update. A field will be overwritten if, and
only if, it is in the update mask. ``name`` and output only
fields cannot be updated.
For a detailed ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
Example: ``updateMask=filter``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
view = proto.Field(
proto.MESSAGE,
number=2,
message='LogView',
)
update_mask = proto.Field(
proto.MESSAGE,
number=4,
message=field_mask_pb2.FieldMask,
)
class GetViewRequest(proto.Message):
r"""The parameters to ``GetView``.
Attributes:
name (str):
            Required. The resource name of the view:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class DeleteViewRequest(proto.Message):
r"""The parameters to ``DeleteView``.
Attributes:
name (str):
Required. The full resource name of the view to delete:
::
"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]"
Example:
``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class ListSinksRequest(proto.Message):
r"""The parameters to ``ListSinks``.
Attributes:
parent (str):
Required. The parent resource whose sinks are to be listed:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]".
page_token (str):
Optional. If present, then retrieve the next batch of
results from the preceding call to this method.
``pageToken`` must be the value of ``nextPageToken`` from
the previous response. The values of other method parameters
should be identical to those in the previous call.
page_size (int):
Optional. The maximum number of results to return from this
request. Non-positive values are ignored. The presence of
``nextPageToken`` in the response indicates that more
results might be available.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
page_token = proto.Field(
proto.STRING,
number=2,
)
page_size = proto.Field(
proto.INT32,
number=3,
)
class ListSinksResponse(proto.Message):
r"""Result returned from ``ListSinks``.
Attributes:
sinks (Sequence[google.cloud.logging_v2.types.LogSink]):
A list of sinks.
next_page_token (str):
If there might be more results than appear in this response,
then ``nextPageToken`` is included. To get the next set of
results, call the same method again using the value of
``nextPageToken`` as ``pageToken``.
"""
@property
def raw_page(self):
return self
sinks = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='LogSink',
)
next_page_token = proto.Field(
proto.STRING,
number=2,
)
class GetSinkRequest(proto.Message):
r"""The parameters to ``GetSink``.
Attributes:
sink_name (str):
Required. The resource name of the sink:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
"""
sink_name = proto.Field(
proto.STRING,
number=1,
)
class CreateSinkRequest(proto.Message):
r"""The parameters to ``CreateSink``.
Attributes:
parent (str):
Required. The resource in which to create the sink:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
Examples: ``"projects/my-logging-project"``,
``"organizations/123456789"``.
sink (google.cloud.logging_v2.types.LogSink):
Required. The new sink, whose ``name`` parameter is a sink
identifier that is not already in use.
unique_writer_identity (bool):
Optional. Determines the kind of IAM identity returned as
``writer_identity`` in the new sink. If this value is
omitted or set to false, and if the sink's parent is a
project, then the value returned as ``writer_identity`` is
the same group or service account used by Logging before the
addition of writer identities to this API. The sink's
destination must be in the same project as the sink itself.
If this field is set to true, or if the sink is owned by a
non-project resource such as an organization, then the value
of ``writer_identity`` will be a unique service account used
only for exports from the new sink. For more information,
see ``writer_identity`` in
[LogSink][google.logging.v2.LogSink].
"""
parent = proto.Field(
proto.STRING,
number=1,
)
sink = proto.Field(
proto.MESSAGE,
number=2,
message='LogSink',
)
unique_writer_identity = proto.Field(
proto.BOOL,
number=3,
)
class UpdateSinkRequest(proto.Message):
r"""The parameters to ``UpdateSink``.
Attributes:
sink_name (str):
Required. The full resource name of the sink to update,
including the parent resource and the sink identifier:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
sink (google.cloud.logging_v2.types.LogSink):
Required. The updated sink, whose name is the same
identifier that appears as part of ``sink_name``.
unique_writer_identity (bool):
Optional. See
[sinks.create][google.logging.v2.ConfigServiceV2.CreateSink]
for a description of this field. When updating a sink, the
effect of this field on the value of ``writer_identity`` in
the updated sink depends on both the old and new values of
this field:
- If the old and new values of this field are both false or
both true, then there is no change to the sink's
``writer_identity``.
- If the old value is false and the new value is true, then
``writer_identity`` is changed to a unique service
account.
- It is an error if the old value is true and the new value
is set to false or defaulted to false.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask that specifies the fields in ``sink``
that need an update. A sink field will be overwritten if,
and only if, it is in the update mask. ``name`` and output
only fields cannot be updated.
An empty updateMask is temporarily treated as using the
following mask for backwards compatibility purposes:
            destination,filter,includeChildren. At some point in the
            future, this behavior will be removed, and specifying an
            empty updateMask will be an error.
For a detailed ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
Example: ``updateMask=filter``.
"""
sink_name = proto.Field(
proto.STRING,
number=1,
)
sink = proto.Field(
proto.MESSAGE,
number=2,
message='LogSink',
)
unique_writer_identity = proto.Field(
proto.BOOL,
number=3,
)
update_mask = proto.Field(
proto.MESSAGE,
number=4,
message=field_mask_pb2.FieldMask,
)
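# Illustrative sketch (not part of the generated module): a partial sink
# update that follows the ``updateMask=filter`` example in the docstring
# above. The sink name is a hypothetical placeholder.
#
#   request = UpdateSinkRequest(
#       sink_name='projects/my-project-id/sinks/my-sink-id',
#       sink=LogSink(filter='severity>=WARNING'),
#       update_mask=field_mask_pb2.FieldMask(paths=['filter']),
#   )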
class DeleteSinkRequest(proto.Message):
r"""The parameters to ``DeleteSink``.
Attributes:
sink_name (str):
Required. The full resource name of the sink to delete,
including the parent resource and the sink identifier:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
"""
sink_name = proto.Field(
proto.STRING,
number=1,
)
class LogExclusion(proto.Message):
r"""Specifies a set of log entries that are not to be stored in
Logging. If your GCP resource receives a large volume of logs,
you can use exclusions to reduce your chargeable logs.
Exclusions are processed after log sinks, so you can export log
entries before they are excluded. Note that organization-level
and folder-level exclusions don't apply to child resources, and
that you can't exclude audit log entries.
Attributes:
name (str):
Required. A client-assigned identifier, such as
``"load-balancer-exclusion"``. Identifiers are limited to
100 characters and can include only letters, digits,
underscores, hyphens, and periods. First character has to be
alphanumeric.
description (str):
Optional. A description of this exclusion.
filter (str):
Required. An `advanced logs
filter <https://cloud.google.com/logging/docs/view/advanced-queries>`__
that matches the log entries to be excluded. By using the
`sample
function <https://cloud.google.com/logging/docs/view/advanced-queries#sample>`__,
you can exclude less than 100% of the matching log entries.
For example, the following query matches 99% of low-severity
log entries from Google Cloud Storage buckets:
``"resource.type=gcs_bucket severity<ERROR sample(insertId, 0.99)"``
disabled (bool):
Optional. If set to True, then this exclusion is disabled
and it does not exclude any log entries. You can [update an
exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion]
to change the value of this field.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The creation timestamp of the
exclusion.
This field may not be present for older
exclusions.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The last update timestamp of the
exclusion.
This field may not be present for older
exclusions.
"""
name = proto.Field(
proto.STRING,
number=1,
)
description = proto.Field(
proto.STRING,
number=2,
)
filter = proto.Field(
proto.STRING,
number=3,
)
disabled = proto.Field(
proto.BOOL,
number=4,
)
create_time = proto.Field(
proto.MESSAGE,
number=5,
message=timestamp_pb2.Timestamp,
)
update_time = proto.Field(
proto.MESSAGE,
number=6,
message=timestamp_pb2.Timestamp,
)
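# Illustrative sketch (not part of the generated module): an exclusion that
# drops 99% of low-severity GCS entries, reusing the sample() filter quoted
# in the docstring above.
#
#   exclusion = LogExclusion(
#       name='load-balancer-exclusion',
#       description='Exclude most low-severity GCS entries',
#       filter='resource.type=gcs_bucket severity<ERROR sample(insertId, 0.99)',
#   )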
class ListExclusionsRequest(proto.Message):
r"""The parameters to ``ListExclusions``.
Attributes:
parent (str):
Required. The parent resource whose exclusions are to be
listed.
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]".
page_token (str):
Optional. If present, then retrieve the next batch of
results from the preceding call to this method.
``pageToken`` must be the value of ``nextPageToken`` from
the previous response. The values of other method parameters
should be identical to those in the previous call.
page_size (int):
Optional. The maximum number of results to return from this
request. Non-positive values are ignored. The presence of
``nextPageToken`` in the response indicates that more
results might be available.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
page_token = proto.Field(
proto.STRING,
number=2,
)
page_size = proto.Field(
proto.INT32,
number=3,
)
class ListExclusionsResponse(proto.Message):
r"""Result returned from ``ListExclusions``.
Attributes:
exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]):
A list of exclusions.
next_page_token (str):
If there might be more results than appear in this response,
then ``nextPageToken`` is included. To get the next set of
results, call the same method again using the value of
``nextPageToken`` as ``pageToken``.
"""
@property
def raw_page(self):
return self
exclusions = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='LogExclusion',
)
next_page_token = proto.Field(
proto.STRING,
number=2,
)
class GetExclusionRequest(proto.Message):
r"""The parameters to ``GetExclusion``.
Attributes:
name (str):
Required. The resource name of an existing exclusion:
::
"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
Example:
``"projects/my-project-id/exclusions/my-exclusion-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class CreateExclusionRequest(proto.Message):
r"""The parameters to ``CreateExclusion``.
Attributes:
parent (str):
Required. The parent resource in which to create the
exclusion:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
Examples: ``"projects/my-logging-project"``,
``"organizations/123456789"``.
exclusion (google.cloud.logging_v2.types.LogExclusion):
Required. The new exclusion, whose ``name`` parameter is an
exclusion name that is not already used in the parent
resource.
"""
parent = proto.Field(
proto.STRING,
number=1,
)
exclusion = proto.Field(
proto.MESSAGE,
number=2,
message='LogExclusion',
)
class UpdateExclusionRequest(proto.Message):
r"""The parameters to ``UpdateExclusion``.
Attributes:
name (str):
Required. The resource name of the exclusion to update:
::
"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
Example:
``"projects/my-project-id/exclusions/my-exclusion-id"``.
exclusion (google.cloud.logging_v2.types.LogExclusion):
Required. New values for the existing exclusion. Only the
fields specified in ``update_mask`` are relevant.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. A non-empty list of fields to change in the
existing exclusion. New values for the fields are taken from
the corresponding fields in the
[LogExclusion][google.logging.v2.LogExclusion] included in
this request. Fields not mentioned in ``update_mask`` are
not changed and are ignored in the request.
For example, to change the filter and description of an
exclusion, specify an ``update_mask`` of
``"filter,description"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
exclusion = proto.Field(
proto.MESSAGE,
number=2,
message='LogExclusion',
)
update_mask = proto.Field(
proto.MESSAGE,
number=3,
message=field_mask_pb2.FieldMask,
)
class DeleteExclusionRequest(proto.Message):
r"""The parameters to ``DeleteExclusion``.
Attributes:
name (str):
Required. The resource name of an existing exclusion to
delete:
::
"projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
"organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
"folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"
Example:
``"projects/my-project-id/exclusions/my-exclusion-id"``.
"""
name = proto.Field(
proto.STRING,
number=1,
)
class GetCmekSettingsRequest(proto.Message):
r"""The parameters to
[GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
Attributes:
name (str):
Required. The resource for which to retrieve CMEK settings.
::
"projects/[PROJECT_ID]/cmekSettings"
"organizations/[ORGANIZATION_ID]/cmekSettings"
"billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
"folders/[FOLDER_ID]/cmekSettings"
Example: ``"organizations/12345/cmekSettings"``.
Note: CMEK for the Logs Router can currently only be
configured for GCP organizations. Once configured, it
applies to all projects and folders in the GCP organization.
"""
name = proto.Field(
proto.STRING,
number=1,
)
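# Illustrative sketch (not part of the generated module): fetching the
# organization-level CMEK settings, reusing the docstring's own example
# resource name.
#
#   request = GetCmekSettingsRequest(name='organizations/12345/cmekSettings')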
class UpdateCmekSettingsRequest(proto.Message):
r"""The parameters to
[UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings].
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
Attributes:
name (str):
Required. The resource name for the CMEK settings to update.
::
"projects/[PROJECT_ID]/cmekSettings"
"organizations/[ORGANIZATION_ID]/cmekSettings"
"billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings"
"folders/[FOLDER_ID]/cmekSettings"
Example: ``"organizations/12345/cmekSettings"``.
Note: CMEK for the Logs Router can currently only be
configured for GCP organizations. Once configured, it
applies to all projects and folders in the GCP organization.
cmek_settings (google.cloud.logging_v2.types.CmekSettings):
Required. The CMEK settings to update.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Field mask identifying which fields from
``cmek_settings`` should be updated. A field will be
overwritten if and only if it is in the update mask. Output
only fields cannot be updated.
See [FieldMask][google.protobuf.FieldMask] for more
information.
Example: ``"updateMask=kmsKeyName"``
"""
name = proto.Field(
proto.STRING,
number=1,
)
cmek_settings = proto.Field(
proto.MESSAGE,
number=2,
message='CmekSettings',
)
update_mask = proto.Field(
proto.MESSAGE,
number=3,
message=field_mask_pb2.FieldMask,
)
class CmekSettings(proto.Message):
r"""Describes the customer-managed encryption key (CMEK) settings
associated with a project, folder, organization, billing account, or
flexible resource.
Note: CMEK for the Logs Router can currently only be configured for
GCP organizations. Once configured, it applies to all projects and
folders in the GCP organization.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
Attributes:
name (str):
Output only. The resource name of the CMEK
settings.
kms_key_name (str):
The resource name for the configured Cloud KMS key.
KMS key name format:
"projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]"
For example:
``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"``
To enable CMEK for the Logs Router, set this field to a
valid ``kms_key_name`` for which the associated service
account has the required
``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned
for the key.
The Cloud KMS key used by the Log Router can be updated by
changing the ``kms_key_name`` to a new valid key name.
Encryption operations that are in progress will be completed
with the key that was in use when they started. Decryption
operations will be completed using the key that was used at
the time of encryption unless access to that key has been
revoked.
To disable CMEK for the Logs Router, set this field to an
empty string.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
service_account_id (str):
Output only. The service account that will be used by the
Logs Router to access your Cloud KMS key.
Before enabling CMEK for Logs Router, you must first assign
the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to
the service account that the Logs Router will use to access
your Cloud KMS key. Use
[GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]
to obtain the service account ID.
See `Enabling CMEK for Logs
Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
for more information.
"""
name = proto.Field(
proto.STRING,
number=1,
)
kms_key_name = proto.Field(
proto.STRING,
number=2,
)
service_account_id = proto.Field(
proto.STRING,
number=3,
)
__all__ = tuple(sorted(__protobuf__.manifest))
|
cloudera/hue | refs/heads/master | desktop/core/ext-py/Django-1.11.29/tests/validation/test_error_messages.py | 43 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from django.core.exceptions import ValidationError
from django.db import models
class ValidationMessagesTest(TestCase):
def _test_validation_messages(self, field, value, expected):
with self.assertRaises(ValidationError) as cm:
field.clean(value, None)
self.assertEqual(cm.exception.messages, expected)
def test_autofield_field_raises_error_message(self):
f = models.AutoField(primary_key=True)
self._test_validation_messages(f, 'fõo', ["'fõo' value must be an integer."])
def test_integer_field_raises_error_message(self):
f = models.IntegerField()
self._test_validation_messages(f, 'fõo', ["'fõo' value must be an integer."])
def test_boolean_field_raises_error_message(self):
f = models.BooleanField()
self._test_validation_messages(f, 'fõo', ["'fõo' value must be either True or False."])
def test_float_field_raises_error_message(self):
f = models.FloatField()
self._test_validation_messages(f, 'fõo', ["'fõo' value must be a float."])
def test_decimal_field_raises_error_message(self):
f = models.DecimalField()
self._test_validation_messages(f, 'fõo', ["'fõo' value must be a decimal number."])
def test_null_boolean_field_raises_error_message(self):
f = models.NullBooleanField()
self._test_validation_messages(f, 'fõo', ["'fõo' value must be either None, True or False."])
def test_date_field_raises_error_message(self):
f = models.DateField()
self._test_validation_messages(
f, 'fõo',
["'fõo' value has an invalid date format. It must be in YYYY-MM-DD format."]
)
self._test_validation_messages(
f, 'aaaa-10-10',
["'aaaa-10-10' value has an invalid date format. It must be in YYYY-MM-DD format."]
)
self._test_validation_messages(
f, '2011-13-10',
["'2011-13-10' value has the correct format (YYYY-MM-DD) but it is an invalid date."]
)
self._test_validation_messages(
f, '2011-10-32',
["'2011-10-32' value has the correct format (YYYY-MM-DD) but it is an invalid date."]
)
def test_datetime_field_raises_error_message(self):
f = models.DateTimeField()
# Wrong format
self._test_validation_messages(
f, 'fõo',
["'fõo' value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."]
)
# Correct format but invalid date
self._test_validation_messages(
f, '2011-10-32',
["'2011-10-32' value has the correct format (YYYY-MM-DD) but it is an invalid date."]
)
# Correct format but invalid date/time
self._test_validation_messages(
f, '2011-10-32 10:10',
["'2011-10-32 10:10' value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."]
)
def test_time_field_raises_error_message(self):
f = models.TimeField()
# Wrong format
self._test_validation_messages(
f, 'fõo',
["'fõo' value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] format."]
)
# Correct format but invalid time
self._test_validation_messages(
f, '25:50',
["'25:50' value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an invalid time."]
)
|
redhat-cip/horizon | refs/heads/master | openstack_dashboard/management/commands/migrate_settings.py | 60 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import difflib
import imp
import optparse
import os
import shlex
import subprocess
import sys
import time
import warnings
from django.core.management.templates import BaseCommand # noqa
# Suppress DeprecationWarnings which clutter the output to the point of
# rendering it unreadable.
warnings.simplefilter('ignore')
def get_module_path(module_name):
"""Gets the module path without importing anything.
Avoids conflicts with package dependencies.
(taken from http://github.com/sitkatech/pypatch)
"""
path = sys.path
for name in module_name.split('.'):
file_pointer, path, desc = imp.find_module(name, path)
path = [path, ]
if file_pointer is not None:
file_pointer.close()
return path[0]
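# Hedged usage sketch: resolve the settings module's path without importing
# it, exactly as Command.__init__ below does with DJANGO_SETTINGS_MODULE.
#
#   settings_path = get_module_path(os.environ['DJANGO_SETTINGS_MODULE'])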
class DirContext(object):
"""Change directory in a context manager.
This allows changing directory and to always fall back to the previous
directory whatever happens during execution.
Usage::
with DirContext('/home/foo') as dircontext:
# Some code happening in '/home/foo'
# We are back to the previous directory.
"""
def __init__(self, dirname):
self.prevdir = os.path.abspath(os.curdir)
os.chdir(dirname)
self.curdir = os.path.abspath(os.curdir)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
os.chdir(self.prevdir)
def __str__(self):
return self.curdir
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
optparse.make_option(
'--gendiff',
action='store_true',
dest='gendiff',
default=False,
help=('Generate a diff file between local_settings.py and '
'local_settings.py.example'),
),
optparse.make_option(
'-f', '--force',
action='store_true',
dest='force',
default=False,
help=('Force destination rewriting without warning if the '
'destination file already exists.'),
),
)
help = ("Creates a local_settings.py file from the "
"local_settings.py.example template.")
time_fmt = '%Y-%m-%d %H:%M:%S %Z'
file_time_fmt = '%Y%m%d%H%M%S%Z'
local_settings_example = 'local_settings.py.example'
local_settings_file = 'local_settings.py'
local_settings_diff = 'local_settings.diff'
local_settings_reject_pattern = 'local_settings.py_%s.rej'
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
settings_file = os.path.abspath(
get_module_path(os.environ['DJANGO_SETTINGS_MODULE'])
)
self.local_settings_dir = os.path.abspath(
os.path.join(
os.path.realpath(os.path.dirname(settings_file)),
'local'
)
)
def gendiff(self, force=False):
"""Generate a diff between self.local_settings and the example file.
"""
with DirContext(self.local_settings_dir) as dircontext:
if not os.path.exists(self.local_settings_diff) or force:
with open(self.local_settings_example, 'r') as fp:
example_lines = fp.readlines()
with open(self.local_settings_file, 'r') as fp:
local_settings_lines = fp.readlines()
local_settings_example_mtime = time.strftime(
self.time_fmt,
time.localtime(
os.stat(self.local_settings_example).st_mtime)
)
local_settings_mtime = time.strftime(
self.time_fmt,
time.localtime(os.stat(self.local_settings_file).st_mtime)
)
print('generating "%s"...' % os.path.join(
dircontext.curdir,
self.local_settings_diff)
)
with open(self.local_settings_diff, 'w') as fp:
for line in difflib.unified_diff(
example_lines, local_settings_lines,
fromfile=self.local_settings_example,
tofile=self.local_settings_file,
fromfiledate=local_settings_example_mtime,
tofiledate=local_settings_mtime
):
fp.write(line)
print('\tDONE.')
sys.exit(0)
else:
sys.exit(
'"%s" already exists.' %
os.path.join(dircontext.curdir,
self.local_settings_diff)
)
def patch(self, force=False):
"""Patch local_settings.py.example with local_settings.diff.
The patch application generates the local_settings.py file (the
local_settings.py.example remains unchanged).
http://github.com/sitkatech/pypatch fails if the
local_settings.py.example file is not 100% identical to the one used to
        generate the first diff, so we use the patch command instead.
"""
with DirContext(self.local_settings_dir) as dircontext:
if os.path.exists(self.local_settings_diff):
if not os.path.exists(self.local_settings_file) or force:
local_settings_reject = \
self.local_settings_reject_pattern % (
time.strftime(self.file_time_fmt, time.localtime())
)
patch_cmd = shlex.split(
'patch %s %s -o %s -r %s' % (
self.local_settings_example,
self.local_settings_diff,
self.local_settings_file,
local_settings_reject
)
)
try:
subprocess.check_call(patch_cmd)
except subprocess.CalledProcessError:
if os.path.exists(local_settings_reject):
sys.exit(
'Some conflict(s) occurred. Please check "%s" '
'to find unapplied parts of the diff.\n'
'Once conflicts are solved, it is safer to '
'regenerate a newer diff with the "--gendiff" '
'option.' %
os.path.join(
dircontext.curdir,
local_settings_reject)
)
else:
sys.exit('An unhandled error occurred.')
print('Generation of "%s" successful.' % os.path.join(
dircontext.curdir,
self.local_settings_file)
)
sys.exit(0)
else:
sys.exit(
'"%s" already exists.' %
os.path.join(dircontext.curdir,
self.local_settings_file)
)
else:
sys.exit('No diff file found, please generate one with the '
'"--gendiff" option.')
def handle(self, *args, **options):
force = options.get('force')
if options.get('gendiff'):
self.gendiff(force)
else:
self.patch(force)
|
moyogo/noto-emoji | refs/heads/master | map_pua_emoji.py | 8 | #!/usr/bin/env python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Modify an emoji font to map legacy PUA characters to standard ligatures."""
__author__ = '[email protected] (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from nototools import font_data
import add_emoji_gsub
def get_glyph_name_from_gsub(char_seq, font):
"""Find the glyph name for ligature of a given character sequence from GSUB.
"""
cmap = font_data.get_cmap(font)
# FIXME: So many assumptions are made here.
try:
first_glyph = cmap[char_seq[0]]
rest_of_glyphs = [cmap[ch] for ch in char_seq[1:]]
except KeyError:
return None
for lookup in font['GSUB'].table.LookupList.Lookup:
ligatures = lookup.SubTable[0].ligatures
try:
for ligature in ligatures[first_glyph]:
if ligature.Component == rest_of_glyphs:
return ligature.LigGlyph
except KeyError:
continue
return None
def add_pua_cmap(source_file, target_file):
"""Add PUA characters to the cmap of the first font and save as second."""
font = ttLib.TTFont(source_file)
cmap = font_data.get_cmap(font)
for pua, (ch1, ch2) in (add_emoji_gsub.EMOJI_KEYCAPS.items()
+ add_emoji_gsub.EMOJI_FLAGS.items()):
if pua not in cmap:
glyph_name = get_glyph_name_from_gsub([ch1, ch2], font)
if glyph_name is not None:
cmap[pua] = glyph_name
font.save(target_file)
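# Hedged usage sketch: the script is meant to be run from the command line
# with a source and a target font path (the file names below are
# illustrative placeholders):
#
#   python map_pua_emoji.py NotoColorEmoji-src.ttf NotoColorEmoji.ttf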
def main(argv):
"""Save the first font given to the second font."""
add_pua_cmap(argv[1], argv[2])
if __name__ == '__main__':
main(sys.argv)
|
jkburges/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/irc_command_unittest.py | 121 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.bot.irc_command import *
from webkitpy.tool.mocktool import MockTool
from webkitpy.common.net.web_mock import MockWeb
from webkitpy.common.system.executive_mock import MockExecutive
from webkitpy.common.system.filesystem_mock import MockFileSystem
class IRCCommandTest(unittest.TestCase):
def test_whois(self):
whois = Whois()
self.assertEqual("tom: Usage: whois SEARCH_STRING",
whois.execute("tom", [], None, None))
self.assertEqual('tom: Adam Barth is "Adam Barth" <[email protected]> (:abarth) (r). Why do you ask?',
whois.execute("tom", ["Adam", "Barth"], None, None))
self.assertEqual("tom: Sorry, I don't know any contributors matching '[email protected]'.",
whois.execute("tom", ["[email protected]"], None, None))
self.assertEqual('tom: [email protected] is "Tony Gentilcore" <[email protected]> (:tonyg-cr) (r). Why do you ask?',
whois.execute("tom", ["[email protected]"], None, None))
self.assertEqual('tom: [email protected] is "Tony Gentilcore" <[email protected]> (:tonyg-cr) (r). Why do you ask?',
whois.execute("tom", ["[email protected]"], None, None))
self.assertEqual('tom: rniwa is "Ryosuke Niwa" <[email protected]> (:rniwa) (r). Why do you ask?',
whois.execute("tom", ["rniwa"], None, None))
self.assertEqual('tom: lopez is "Xan Lopez" <[email protected]> (:xan) (r). Why do you ask?',
whois.execute("tom", ["lopez"], None, None))
self.assertEqual(u'tom: Osztrogon\u00e1c is "Csaba Osztrogon\u00e1c" <[email protected]> (:ossy) (r). Why do you ask?',
whois.execute("tom", [u'Osztrogon\u00e1c'], None, None))
self.assertEqual('tom: "Vicki Murley" <[email protected]> hasn\'t told me their nick. Boo hoo :-(',
whois.execute("tom", ["[email protected]"], None, None))
self.assertEqual('tom: I\'m not sure who you mean? "Gavin Peters" <[email protected]> (:gavinp) (c) or "Gavin Barraclough" <[email protected]> (:gbarra) (r) could be \'Gavin\'.',
whois.execute("tom", ["Gavin"], None, None))
self.assertEqual('tom: More than 5 contributors match \'david\', could you be more specific?',
whois.execute("tom", ["david"], None, None))
def test_create_bug(self):
create_bug = CreateBug()
self.assertEqual("tom: Usage: create-bug BUG_TITLE",
create_bug.execute("tom", [], None, None))
example_args = ["sherrif-bot", "should", "have", "a", "create-bug", "command"]
tool = MockTool()
        # MockBugzilla has a create_bug, but it logs to stderr; overriding it here avoids any logging.
tool.bugs.create_bug = lambda a, b, cc=None, assignee=None: 50004
self.assertEqual("tom: Created bug: http://example.com/50004",
create_bug.execute("tom", example_args, tool, None))
def mock_create_bug(title, description, cc=None, assignee=None):
raise Exception("Exception from bugzilla!")
tool.bugs.create_bug = mock_create_bug
self.assertEqual("tom: Failed to create bug:\nException from bugzilla!",
create_bug.execute("tom", example_args, tool, None))
def test_rollout_updates_working_copy(self):
rollout = Rollout()
tool = MockTool()
tool.executive = MockExecutive(should_log=True)
expected_logs = "MOCK run_and_throw_if_fail: ['mock-update-webkit'], cwd=/mock-checkout\n"
OutputCapture().assert_outputs(self, rollout._update_working_copy, [tool], expected_logs=expected_logs)
def test_rollout(self):
rollout = Rollout()
self.assertEqual(([1234], "testing foo"),
rollout._parse_args(["1234", "testing", "foo"]))
self.assertEqual(([554], "testing foo"),
rollout._parse_args(["r554", "testing", "foo"]))
self.assertEqual(([556, 792], "testing foo"),
rollout._parse_args(["r556", "792", "testing", "foo"]))
self.assertEqual(([128, 256], "testing foo"),
rollout._parse_args(["r128,r256", "testing", "foo"]))
self.assertEqual(([512, 1024, 2048], "testing foo"),
rollout._parse_args(["512,", "1024,2048", "testing", "foo"]))
# Test invalid argument parsing:
self.assertEqual((None, None), rollout._parse_args([]))
self.assertEqual((None, None), rollout._parse_args(["--bar", "1234"]))
# Invalid arguments result in the USAGE message.
self.assertEqual("tom: Usage: rollout SVN_REVISION [SVN_REVISIONS] REASON",
rollout.execute("tom", [], None, None))
tool = MockTool()
tool.filesystem.files["/mock-checkout/test/file/one"] = ""
tool.filesystem.files["/mock-checkout/test/file/two"] = ""
self.assertEqual("Failed to apply reverse diff for file(s): test/file/one, test/file/two",
rollout._check_diff_failure("""
Preparing rollout for bug 123456.
Updating working directory
Failed to apply reverse diff for revision 123456 because of the following conflicts:
test/file/one
test/file/two
Failed to apply reverse diff for revision 123456 because of the following conflicts:
test/file/one
test/file/two
Updating OpenSource
Current branch master is up to date.
""", tool))
self.assertEqual(None, rollout._check_diff_failure("""
Preparing rollout for bug 123456.
Updating working directory
Some other error report involving file paths:
test/file/one
test/file/two
Updating OpenSource
Current branch master is up to date.
""", tool))
# FIXME: We need a better way to test IRCCommands which call tool.irc().post()
|
gisce/libComXML | refs/heads/master | setup.py | 1 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='libComXML',
version='3.1.0',
url='https://github.com/gisce/libComXML',
author='GISCE-TI, S.L',
author_email='[email protected]',
packages=find_packages(),
install_requires=['lxml', 'six'],
license='None',
description='This library permits XML generation from Python objects',
test_suite='tests',
classifiers=[
'Topic :: Text Processing :: Markup :: XML',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'
]
)
|
Adel-Magebinary/odoo | refs/heads/8.0 | addons/product/tests/test_pricelist.py | 280 | from openerp.tests.common import TransactionCase
class TestPricelist(TransactionCase):
"""Tests for unit of measure conversion"""
def setUp(self):
super(TestPricelist, self).setUp()
cr, uid, context = self.cr, self.uid, {}
self.ir_model_data = self.registry('ir.model.data')
self.product_product = self.registry('product.product')
self.product_pricelist = self.registry('product.pricelist')
self.uom = self.registry('product.uom')
self.usb_adapter_id = self.ir_model_data.get_object_reference(cr, uid, 'product', 'product_product_48')[1]
self.datacard_id = self.ir_model_data.get_object_reference(cr, uid, 'product', 'product_product_46')[1]
self.unit_id = self.ir_model_data.get_object_reference(cr, uid, 'product', 'product_uom_unit')[1]
self.dozen_id = self.ir_model_data.get_object_reference(cr, uid, 'product', 'product_uom_dozen')[1]
self.tonne_id = self.ir_model_data.xmlid_to_res_id(cr, uid, 'product.product_uom_ton')
self.kg_id = self.ir_model_data.xmlid_to_res_id(cr, uid, 'product.product_uom_kgm')
self.public_pricelist_id = self.ir_model_data.get_object_reference(cr, uid, 'product', 'list0')[1]
self.sale_pricelist_id = self.product_pricelist.create(cr, uid, {
'name': 'Sale pricelist',
'type': 'sale',
'version_id': [(0, 0, {
'name': 'v1.0',
'items_id': [(0, 0, {
'name': 'Discount 10%',
'base': 1, # based on public price
'price_discount': -0.1,
'product_id': self.usb_adapter_id
}), (0, 0, {
'name': 'Discount -0.5',
'base': 1, # based on public price
'price_surcharge': -0.5,
'product_id': self.datacard_id
})]
})]
}, context=context)
def test_10_discount(self):
# Make sure the price using a pricelist is the same than without after
# applying the computation manually
cr, uid, context = self.cr, self.uid, {}
public_context = dict(context, pricelist=self.public_pricelist_id)
pricelist_context = dict(context, pricelist=self.sale_pricelist_id)
usb_adapter_without_pricelist = self.product_product.browse(cr, uid, self.usb_adapter_id, context=public_context)
usb_adapter_with_pricelist = self.product_product.browse(cr, uid, self.usb_adapter_id, context=pricelist_context)
self.assertEqual(usb_adapter_with_pricelist.price, usb_adapter_without_pricelist.price*0.9)
datacard_without_pricelist = self.product_product.browse(cr, uid, self.datacard_id, context=public_context)
datacard_with_pricelist = self.product_product.browse(cr, uid, self.datacard_id, context=pricelist_context)
self.assertEqual(datacard_with_pricelist.price, datacard_without_pricelist.price-0.5)
# Make sure that changing the unit of measure does not break the unit
# price (after converting)
unit_context = dict(context,
pricelist=self.sale_pricelist_id,
uom=self.unit_id)
dozen_context = dict(context,
pricelist=self.sale_pricelist_id,
uom=self.dozen_id)
usb_adapter_unit = self.product_product.browse(cr, uid, self.usb_adapter_id, context=unit_context)
usb_adapter_dozen = self.product_product.browse(cr, uid, self.usb_adapter_id, context=dozen_context)
self.assertAlmostEqual(usb_adapter_unit.price*12, usb_adapter_dozen.price)
datacard_unit = self.product_product.browse(cr, uid, self.datacard_id, context=unit_context)
datacard_dozen = self.product_product.browse(cr, uid, self.datacard_id, context=dozen_context)
        # price_surcharge applies to the product's default UoM (here "Units"), so the surcharge is multiplied
self.assertAlmostEqual(datacard_unit.price*12, datacard_dozen.price)
def test_20_pricelist_uom(self):
# Verify that the pricelist rules are correctly using the product's default UoM
# as reference, and return a result according to the target UoM (as specific in the context)
cr, uid = self.cr, self.uid
kg, tonne = self.kg_id, self.tonne_id
tonne_price = 100
# make sure 'tonne' resolves down to 1 'kg'.
self.uom.write(cr, uid, tonne, {'rounding': 0.001})
# setup product stored in 'tonnes', with a discounted pricelist for qty > 3 tonnes
spam_id = self.product_product.copy(cr, uid, self.usb_adapter_id,
{ 'name': '1 tonne of spam',
'uom_id': self.tonne_id,
'uos_id': self.tonne_id,
'uom_po_id': self.tonne_id,
'list_price': tonne_price,
})
pricelist_version_id = self.ir_model_data.xmlid_to_res_id(cr, uid, 'product.ver0')
self.registry('product.pricelist.item').create(cr, uid,
{ 'price_version_id': pricelist_version_id,
'sequence': 10,
'name': '3+ tonnes: -10 EUR discount/t',
'base': 1, # based on public price
'min_quantity': 3, # min = 3 tonnes
'price_surcharge': -10, # -10 EUR / tonne
'product_id': spam_id,
})
pricelist_id = self.public_pricelist_id
def test_unit_price(qty, uom, expected_unit_price):
unit_price = self.registry('product.pricelist').price_get(cr, uid, [pricelist_id],
spam_id, qty,
context={'uom': uom})[pricelist_id]
self.assertAlmostEqual(unit_price, expected_unit_price, msg='Computed unit price is wrong')
# Test prices - they are *per unit*, the quantity is only here to match the pricelist rules!
test_unit_price(2, kg, tonne_price / 1000.0)
test_unit_price(2000, kg, tonne_price / 1000.0)
test_unit_price(3500, kg, (tonne_price - 10) / 1000.0)
test_unit_price(2, tonne, tonne_price)
test_unit_price(3, tonne, tonne_price - 10)
|
mice-software/maus | refs/heads/master | src/map/MapPyPrint/test_MapPyPrint.py | 1 | #pylint: disable =C0103
"""test_MapPyPrint.py"""
import json
import unittest
from MapPyPrint import MapPyPrint
class MapPyPrintTestCase(unittest.TestCase):#pylint: disable =R0904
"""MapPyPrintTestCase"""
def test_empty(self):
"""test_empty"""
mapper = MapPyPrint()
self.assertTrue(mapper.birth("{}"))
result = mapper.process("")
doc = json.loads(result)
self.assertTrue("errors" in doc)
self.assertTrue(mapper.death())
def test_return(self):
"""test_return"""
mapper = MapPyPrint()
self.assertTrue(mapper.birth("{}"))
result = mapper.process("{}")
self.assertEqual(result, "{}")
self.assertTrue(mapper.death())
if __name__ == '__main__':
unittest.main()
|
smmribeiro/intellij-community | refs/heads/master | python/testData/pyi/lineMarkers/SimilarForFunctionStub/b.py | 819 | def foo():
pass |
ColdSauce/IsSittingOnButt | refs/heads/master | server/env/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py | 2929 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbols (punctuation) that do not belong to words
# 252: 0 - 9
# The following result for Thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
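# Illustrative sketch (not part of chardet's public API): the probers index
# this tuple with each raw TIS-620 byte value to get its frequency-order
# code, e.g.
#
#   order = TIS620CharToOrderMap[0xA1]  # 0xA1 is the first Thai letter slot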
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences: 7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
TIS620ThaiModel = {
'charToOrderMap': TIS620CharToOrderMap,
'precedenceMatrix': ThaiLangModel,
'mTypicalPositiveRatio': 0.926386,
'keepEnglishLetter': False,
'charsetName': "TIS-620"
}
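# A hedged sketch (not part of the original module) of how a single-byte
# charset prober typically consumes this model: each input byte is mapped
# to a frequency order via 'charToOrderMap', and consecutive order pairs
# are scored against 'precedenceMatrix', which is laid out as a flat
# 64x64 table. The indexing below is an assumption based on that layout:
#
#     order = TIS620ThaiModel['charToOrderMap'][byte]
#     if last_order < 64 and order < 64:
#         seq_class = TIS620ThaiModel['precedenceMatrix'][last_order * 64 + order]
#     last_order = order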
# flake8: noqa
|
apple/swift-lldb | refs/heads/stable | packages/Python/lldbsuite/test/functionalities/thread/break_after_join/TestBreakAfterJoin.py | 5 | """
Test number of threads.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class BreakpointAfterJoinTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number for our breakpoint.
self.breakpoint = line_number('main.cpp', '// Set breakpoint here')
@expectedFailureAll(
oslist=["linux"],
bugnumber="llvm.org/pr15824 thread states not properly maintained")
@expectedFailureAll(
oslist=lldbplatformutil.getDarwinOSTriples(),
bugnumber="llvm.org/pr15824 thread states not properly maintained and <rdar://problem/28557237>")
@expectedFailureAll(
oslist=["freebsd"],
bugnumber="llvm.org/pr18190 thread states not properly maintained")
@expectedFailureNetBSD
def test(self):
"""Test breakpoint handling after a thread join."""
self.build(dictionary=self.getBuildFlags())
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# This should create a breakpoint in the main thread.
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.breakpoint, num_expected_locations=1)
# The breakpoint list should show 1 location.
self.expect(
"breakpoint list -f",
"Breakpoint location shown correctly",
substrs=[
"1: file = 'main.cpp', line = %d, exact_match = 0, locations = 1" %
self.breakpoint])
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# Get the target process
target = self.dbg.GetSelectedTarget()
process = target.GetProcess()
# The exit probably occurred during breakpoint handling, but it isn't
# guaranteed. The main thing we're testing here is that the debugger
        # handles this cleanly in some way.
# Get the number of threads
num_threads = process.GetNumThreads()
# Make sure we see at least six threads
self.assertTrue(
num_threads >= 6,
'Number of expected threads and actual threads do not match.')
# Make sure all threads are stopped
for i in range(0, num_threads):
self.assertTrue(
process.GetThreadAtIndex(i).IsStopped(),
"Thread {0} didn't stop during breakpoint.".format(i))
# Run to completion
self.runCmd("continue")
# If the process hasn't exited, collect some information
if process.GetState() != lldb.eStateExited:
self.runCmd("thread list")
self.runCmd("process status")
# At this point, the inferior process should have exited.
self.assertTrue(
process.GetState() == lldb.eStateExited,
PROCESS_EXITED)
|
schwarz/youtube-dl | refs/heads/master | test/test_unicode_literals.py | 168 | from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import io
import re
rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
IGNORED_FILES = [
'setup.py', # http://bugs.python.org/issue13943
'conf.py',
'buildserver.py',
]
IGNORED_DIRS = [
'.git',
'.tox',
]
from test.helper import assertRegexpMatches
class TestUnicodeLiterals(unittest.TestCase):
def test_all_files(self):
for dirpath, dirnames, filenames in os.walk(rootDir):
for ignore_dir in IGNORED_DIRS:
if ignore_dir in dirnames:
# If we remove the directory from dirnames os.walk won't
# recurse into it
dirnames.remove(ignore_dir)
for basename in filenames:
if not basename.endswith('.py'):
continue
if basename in IGNORED_FILES:
continue
fn = os.path.join(dirpath, basename)
with io.open(fn, encoding='utf-8') as inf:
code = inf.read()
if "'" not in code and '"' not in code:
continue
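                # The pattern tolerates any run of comment or blank lines
                # (shebang, coding cookie, license header) before requiring
                # a `from __future__ import ... unicode_literals` statement.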
assertRegexpMatches(
self,
code,
r'(?:(?:#.*?|\s*)\n)*from __future__ import (?:[a-z_]+,\s*)*unicode_literals',
'unicode_literals import missing in %s' % fn)
m = re.search(r'(?<=\s)u[\'"](?!\)|,|$)', code)
if m is not None:
self.assertTrue(
m is None,
'u present in %s, around %s' % (
fn, code[m.start() - 10:m.end() + 10]))
if __name__ == '__main__':
unittest.main()
|
MadManRises/Madgine | refs/heads/master | shared/bullet3-2.89/examples/pybullet/gym/pybullet_envs/deep_mimic/mocap/render_reference.py | 4 | import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)
print('parent:', parentdir)
import pybullet_data
import pybullet
import time
import random
from pybullet_utils.bullet_client import BulletClient
from deep_mimic.env.motion_capture_data import MotionCaptureData
# from pybullet_envs.deep_mimic.env.humanoid_stable_pd import HumanoidStablePd
from humanoid import Humanoid
from humanoid import HumanoidPose
# from env.humanoid_stable_pd
from deepmimic_json_generator import *
import pybullet as p
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Arguments for loading reference for learning.')
# General arguments
parser.add_argument('--dataset_path',
default='data/data_3d_h36m.npz',
type=str,
help='target dataset') # h36m or humaneva
parser.add_argument(
'--json_path',
default='data/Walking.json',
type=str,
help='json file path for storing the deepmimic-format json created by inverse-kinect.')
parser.add_argument('--fps', default=24, type=int, help='frame per second')
parser.add_argument('--subject', default='S11', type=str, help='camera subject.')
parser.add_argument('--action', default='Walking', type=str, help='name of the action.')
parser.add_argument('--loop',
default='wrap',
type=str,
help='loop information in deepmimic, wrap or none.')
parser.add_argument('--draw_gt', action='store_true', help='draw ground truth or not.')
args = parser.parse_args()
dataset_path = args.dataset_path
json_path = args.json_path
fps = args.fps
subject = args.subject
action = args.action
loop = args.loop
draw_gt = args.draw_gt
def draw_ground_truth(coord_seq, frame, duration, shift):
global joint_info
joint = coord_seq[frame]
shift = np.array(shift)
for i in range(1, 17):
# print(x[11], x[14])
joint_fa = joint_info['father'][i]
if joint_info['side'][i] == 'right':
p.addUserDebugLine(lineFromXYZ=joint[i] + shift,
lineToXYZ=joint[joint_fa] + shift,
lineColorRGB=(255, 0, 0),
lineWidth=1,
lifeTime=duration)
else:
p.addUserDebugLine(lineFromXYZ=joint[i] + shift,
lineToXYZ=joint[joint_fa] + shift,
lineColorRGB=(0, 0, 0),
lineWidth=1,
lifeTime=duration)
dataset = init_fb_h36m_dataset(dataset_path)
ground_truth = pose3D_from_fb_h36m(dataset, subject=subject, action=action, shift=[1.0, 0.0, 0.0])
rot_seq = coord_seq_to_rot_seq(coord_seq=ground_truth, frame_duration=1 / fps)
rot_seq_to_deepmimic_json(rot_seq=rot_seq, loop=loop, json_path=json_path)
bc = BulletClient(connection_mode=pybullet.GUI)
bc.setAdditionalSearchPath(pybullet_data.getDataPath())
bc.configureDebugVisualizer(bc.COV_ENABLE_Y_AXIS_UP, 1)
bc.setGravity(0, -9.8, 0)
motion = MotionCaptureData()
motionPath = json_path
motion.Load(motionPath)
print("numFrames = ", motion.NumFrames())
simTimeId = bc.addUserDebugParameter("simTime", 0, motion.NumFrames() - 1.1, 0)
y2zOrn = bc.getQuaternionFromEuler([-1.57, 0, 0])
bc.loadURDF("plane.urdf", [0, -0.04, 0], y2zOrn)
humanoid = Humanoid(bc, motion, [0, 0, 0])  # these are the coordinates of the initial position
print(p.getBasePositionAndOrientation(humanoid._humanoid))
simTime = 0
keyFrameDuration = motion.KeyFrameDuraction()
print("keyFrameDuration=", keyFrameDuration)
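# Replay the reference motion: each iteration advances the simulation one
# step and renders the pose sampled at (frame index * keyFrameDuration),
# optionally drawing the ground-truth skeleton next to it for comparison.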
for utNum in range(motion.NumFrames()):
bc.stepSimulation()
humanoid.RenderReference(utNum * keyFrameDuration)
if draw_gt:
draw_ground_truth(coord_seq=ground_truth,
frame=utNum,
duration=keyFrameDuration,
shift=[-1.0, 0.0, 1.0])
time.sleep(0.001)
stage = 0
def Reset(humanoid):
global simTime
humanoid.Reset()
simTime = 0
humanoid.SetSimTime(simTime)
pose = humanoid.InitializePoseFromMotionData()
humanoid.ApplyPose(pose, True, True, humanoid._humanoid, bc)
Reset(humanoid)
p.disconnect()
|
throwable-one/lettuce | refs/heads/master | tests/integration/django/rucola/manage.py | 2072 | #!/usr/bin/env python
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
|
nickpack/django-oscar | refs/heads/master | tests/functional/basket/manipulation_tests.py | 54 | from oscar.test.testcases import WebTestCase
from oscar.test import factories
from oscar.apps.basket import models
class TestAddingToBasket(WebTestCase):
def test_works_for_standalone_product(self):
product = factories.ProductFactory()
detail_page = self.get(product.get_absolute_url())
response = detail_page.forms['add_to_basket_form'].submit()
self.assertIsRedirect(response)
baskets = models.Basket.objects.all()
self.assertEqual(1, len(baskets))
basket = baskets[0]
self.assertEqual(1, basket.num_items)
def test_works_for_child_product(self):
parent = factories.ProductFactory(structure='parent', stockrecords=[])
for x in range(3):
factories.ProductFactory(parent=parent, structure='child')
detail_page = self.get(parent.get_absolute_url())
form = detail_page.forms['add_to_basket_form']
response = form.submit()
self.assertIsRedirect(response)
baskets = models.Basket.objects.all()
self.assertEqual(1, len(baskets))
basket = baskets[0]
self.assertEqual(1, basket.num_items)
|
fgesora/odoo | refs/heads/8.0 | addons/website_twitter/controllers/main.py | 355 | from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.tools.translate import _
import json
class Twitter(http.Controller):
@http.route(['/twitter_reload'], type='json', auth="user", website=True)
def twitter_reload(self):
return request.website.fetch_favorite_tweets()
@http.route(['/get_favorites'], type='json', auth="public", website=True)
def get_tweets(self, limit=20):
key = request.website.twitter_api_key
secret = request.website.twitter_api_secret
screen_name = request.website.twitter_screen_name
cr, uid = request.cr, request.uid
debug = request.registry['res.users'].has_group(cr, uid, 'base.group_website_publisher')
if not key or not secret:
if debug:
return {"error": _("Please set the Twitter API Key and Secret in the Website Settings.")}
return []
if not screen_name:
if debug:
return {"error": _("Please set a Twitter screen name to load favorites from, "
"in the Website Settings (it does not have to be yours)")}
return []
twitter_tweets = request.registry['website.twitter.tweet']
tweets = twitter_tweets.search_read(
cr, uid,
[('website_id','=', request.website.id),
('screen_name','=', screen_name)],
['tweet'], limit=int(limit), order="tweet_id desc", context=request.context)
if len(tweets) < 12:
if debug:
return {"error": _("Twitter user @%(username)s has less than 12 favorite tweets. "
"Please add more or choose a different screen name.") % \
{'username': screen_name}}
else:
return []
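        # Each stored favorite is the raw tweet payload kept as a JSON string;
        # decoding here returns Twitter's native tweet objects to the caller.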
return [json.loads(tweet['tweet']) for tweet in tweets]
|
sudheerchintala/LearnEraPlatForm | refs/heads/master | common/djangoapps/third_party_auth/tests/test_provider.py | 78 | """Unit tests for provider.py."""
from third_party_auth import provider
from third_party_auth.tests import testutil
class RegistryTest(testutil.TestCase):
"""Tests registry discovery and operation."""
# Allow access to protected methods (or module-protected methods) under
# test. pylint: disable-msg=protected-access
def test_calling_configure_once_twice_raises_value_error(self):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
with self.assertRaisesRegexp(ValueError, '^.*already configured$'):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
def test_configure_once_adds_gettable_providers(self):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
self.assertIs(provider.GoogleOauth2, provider.Registry.get(provider.GoogleOauth2.NAME))
def test_configuring_provider_with_no_implementation_raises_value_error(self):
with self.assertRaisesRegexp(ValueError, '^.*no_implementation$'):
provider.Registry.configure_once(['no_implementation'])
def test_configuring_single_provider_twice_raises_value_error(self):
provider.Registry._enable(provider.GoogleOauth2)
with self.assertRaisesRegexp(ValueError, '^.*already enabled'):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
def test_custom_provider_can_be_enabled(self):
name = 'CustomProvider'
with self.assertRaisesRegexp(ValueError, '^No implementation.*$'):
provider.Registry.configure_once([name])
class CustomProvider(provider.BaseProvider):
"""Custom class to ensure BaseProvider children outside provider can be enabled."""
NAME = name
provider.Registry._reset()
provider.Registry.configure_once([CustomProvider.NAME])
self.assertEqual([CustomProvider], provider.Registry.enabled())
def test_enabled_raises_runtime_error_if_not_configured(self):
with self.assertRaisesRegexp(RuntimeError, '^.*not configured$'):
provider.Registry.enabled()
def test_enabled_returns_list_of_enabled_providers_sorted_by_name(self):
all_providers = provider.Registry._get_all()
provider.Registry.configure_once(all_providers.keys())
self.assertEqual(
sorted(all_providers.values(), key=lambda provider: provider.NAME), provider.Registry.enabled())
def test_get_raises_runtime_error_if_not_configured(self):
with self.assertRaisesRegexp(RuntimeError, '^.*not configured$'):
provider.Registry.get('anything')
def test_get_returns_enabled_provider(self):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
self.assertIs(provider.GoogleOauth2, provider.Registry.get(provider.GoogleOauth2.NAME))
def test_get_returns_none_if_provider_not_enabled(self):
provider.Registry.configure_once([])
self.assertIsNone(provider.Registry.get(provider.LinkedInOauth2.NAME))
def test_get_by_backend_name_raises_runtime_error_if_not_configured(self):
with self.assertRaisesRegexp(RuntimeError, '^.*not configured$'):
provider.Registry.get_by_backend_name('')
def test_get_by_backend_name_returns_enabled_provider(self):
provider.Registry.configure_once([provider.GoogleOauth2.NAME])
self.assertIs(
provider.GoogleOauth2,
provider.Registry.get_by_backend_name(provider.GoogleOauth2.BACKEND_CLASS.name))
def test_get_by_backend_name_returns_none_if_provider_not_enabled(self):
provider.Registry.configure_once([])
self.assertIsNone(provider.Registry.get_by_backend_name(provider.GoogleOauth2.BACKEND_CLASS.name))
|
jeffery9/mixprint_addons | refs/heads/master | stock/stock.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from datetime import datetime
from dateutil.relativedelta import relativedelta
import time
from operator import itemgetter
from itertools import groupby
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import netsvc
from openerp import tools
from openerp.tools import float_compare
import openerp.addons.decimal_precision as dp
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
#----------------------------------------------------------
class stock_incoterms(osv.osv):
_name = "stock.incoterms"
_description = "Incoterms"
_columns = {
        'name': fields.char('Name', size=64, required=True, help="Incoterms are a series of sales terms. They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."),
'code': fields.char('Code', size=3, required=True, help="Code for Incoterms"),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM without deleting it."),
}
_defaults = {
'active': True,
}
stock_incoterms()
class stock_journal(osv.osv):
_name = "stock.journal"
_description = "Stock Journal"
_columns = {
'name': fields.char('Stock Journal', size=32, required=True),
'user_id': fields.many2one('res.users', 'Responsible'),
}
_defaults = {
'user_id': lambda s, c, u, ctx: u
}
stock_journal()
#----------------------------------------------------------
# Stock Location
#----------------------------------------------------------
class stock_location(osv.osv):
_name = "stock.location"
_description = "Location"
_parent_name = "location_id"
_parent_store = True
_parent_order = 'posz,name'
_order = 'parent_left'
def name_get(self, cr, uid, ids, context=None):
# always return the full hierarchical name
res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
return res.items()
def _complete_name(self, cr, uid, ids, name, args, context=None):
""" Forms complete name of location from parent location to child location.
@return: Dictionary of values
"""
res = {}
for m in self.browse(cr, uid, ids, context=context):
names = [m.name]
parent = m.location_id
while parent:
names.append(parent.name)
parent = parent.location_id
res[m.id] = ' / '.join(reversed(names))
return res
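    # Illustrative example (assumed data, not taken from this module): for a
    # location named "Stock" whose parents, going upward, are "Your Company"
    # and then "Physical Locations", _complete_name yields
    # "Physical Locations / Your Company / Stock".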
def _get_sublocations(self, cr, uid, ids, context=None):
""" return all sublocations of the given stock locations (included) """
return self.search(cr, uid, [('id', 'child_of', ids)], context=context)
def _product_value(self, cr, uid, ids, field_names, arg, context=None):
"""Computes stock value (real and virtual) for a product, as well as stock qty (real and virtual).
@param field_names: Name of field
@return: Dictionary of values
"""
prod_id = context and context.get('product_id', False)
if not prod_id:
return dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
product_product_obj = self.pool.get('product.product')
cr.execute('select distinct product_id, location_id from stock_move where location_id in %s', (tuple(ids), ))
dict1 = cr.dictfetchall()
cr.execute('select distinct product_id, location_dest_id as location_id from stock_move where location_dest_id in %s', (tuple(ids), ))
dict2 = cr.dictfetchall()
res_products_by_location = sorted(dict1+dict2, key=itemgetter('location_id'))
products_by_location = dict((k, [v['product_id'] for v in itr]) for k, itr in groupby(res_products_by_location, itemgetter('location_id')))
result = dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids])
result.update(dict([(i, {}.fromkeys(field_names, 0.0)) for i in list(set([aaa['location_id'] for aaa in res_products_by_location]))]))
currency_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id
currency_obj = self.pool.get('res.currency')
currency = currency_obj.browse(cr, uid, currency_id, context=context)
for loc_id, product_ids in products_by_location.items():
if prod_id:
product_ids = [prod_id]
c = (context or {}).copy()
c['location'] = loc_id
for prod in product_product_obj.browse(cr, uid, product_ids, context=c):
for f in field_names:
if f == 'stock_real':
if loc_id not in result:
result[loc_id] = {}
result[loc_id][f] += prod.qty_available
elif f == 'stock_virtual':
result[loc_id][f] += prod.virtual_available
elif f == 'stock_real_value':
amount = prod.qty_available * prod.standard_price
amount = currency_obj.round(cr, uid, currency, amount)
result[loc_id][f] += amount
elif f == 'stock_virtual_value':
amount = prod.virtual_available * prod.standard_price
amount = currency_obj.round(cr, uid, currency, amount)
result[loc_id][f] += amount
return result
_columns = {
'name': fields.char('Location Name', size=64, required=True, translate=True),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."),
'usage': fields.selection([('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location for Inter-Companies Transfers')], 'Location Type', required=True,
help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers
\n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products
\n* Internal Location: Physical locations inside your own warehouses,
\n* Customer Location: Virtual location representing the destination location for products sent to your customers
\n* Inventory: Virtual location serving as counterpart for inventory operations used to correct stock levels (Physical inventories)
\n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running.
\n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products
""", select = True),
# temporarily removed, as it's unused: 'allocation_method': fields.selection([('fifo', 'FIFO'), ('lifo', 'LIFO'), ('nearest', 'Nearest')], 'Allocation Method', required=True),
'complete_name': fields.function(_complete_name, type='char', size=256, string="Location Name",
store={'stock.location': (_get_sublocations, ['name', 'location_id'], 10)}),
'stock_real': fields.function(_product_value, type='float', string='Real Stock', multi="stock"),
'stock_virtual': fields.function(_product_value, type='float', string='Virtual Stock', multi="stock"),
'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'),
'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'),
'chained_journal_id': fields.many2one('stock.journal', 'Chaining Journal',help="Inventory Journal in which the chained move will be written, if the Chaining Type is not Transparent (no journal is used if left empty)"),
'chained_location_id': fields.many2one('stock.location', 'Chained Location If Fixed'),
'chained_location_type': fields.selection([('none', 'None'), ('customer', 'Customer'), ('fixed', 'Fixed Location')],
'Chained Location Type', required=True,
help="Determines whether this location is chained to another location, i.e. any incoming product in this location \n" \
"should next go to the chained location. The chained location is determined according to the type :"\
"\n* None: No chaining at all"\
"\n* Customer: The chained location will be taken from the Customer Location field on the Partner form of the Partner that is specified in the Picking list of the incoming products." \
"\n* Fixed Location: The chained location is taken from the next field: Chained Location if Fixed." \
),
'chained_auto_packing': fields.selection(
[('auto', 'Automatic Move'), ('manual', 'Manual Operation'), ('transparent', 'Automatic No Step Added')],
'Chaining Type',
required=True,
help="This is used only if you select a chained location type.\n" \
"The 'Automatic Move' value will create a stock move after the current one that will be "\
"validated automatically. With 'Manual Operation', the stock move has to be validated "\
"by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
),
'chained_picking_type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', help="Shipping Type of the Picking List that will contain the chained move (leave empty to automatically detect the type based on the source and destination locations)."),
        'chained_company_id': fields.many2one('res.company', 'Chained Company', help='The company the Picking List containing the chained move will belong to (leave empty to use the default company determination rules)'),
'chained_delay': fields.integer('Chaining Lead Time',help="Delay between original move and chained move in days"),
'partner_id': fields.many2one('res.partner', 'Location Address',help="Address of customer or supplier."),
'icon': fields.selection(tools.icons, 'Icon', size=64,help="Icon show in hierarchical tree view"),
'comment': fields.text('Additional Information'),
'posx': fields.integer('Corridor (X)',help="Optional localization details, for information purpose only"),
'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"),
'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('Right Parent', select=1),
'stock_real_value': fields.function(_product_value, type='float', string='Real Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
'stock_virtual_value': fields.function(_product_value, type='float', string='Virtual Stock Value', multi="stock", digits_compute=dp.get_precision('Account')),
'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between all companies'),
'scrap_location': fields.boolean('Scrap Location', help='Check this box to allow using this location to put scrapped/damaged goods.'),
'valuation_in_account_id': fields.many2one('account.account', 'Stock Valuation Account (Incoming)', domain = [('type','=','other')],
help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
"this account will be used to hold the value of products being moved from an internal location "
"into this location, instead of the generic Stock Output Account set on the product. "
"This has no effect for internal locations."),
'valuation_out_account_id': fields.many2one('account.account', 'Stock Valuation Account (Outgoing)', domain = [('type','=','other')],
help="Used for real-time inventory valuation. When set on a virtual location (non internal type), "
"this account will be used to hold the value of products being moved out of this location "
"and into an internal location, instead of the generic Stock Output Account set on the product. "
"This has no effect for internal locations."),
}
_defaults = {
'active': True,
'usage': 'internal',
'chained_location_type': 'none',
'chained_auto_packing': 'manual',
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c),
'posx': 0,
'posy': 0,
'posz': 0,
'icon': False,
'scrap_location': False,
}
def chained_location_get(self, cr, uid, location, partner=None, product=None, context=None):
""" Finds chained location
@param location: Location id
@param partner: Partner id
@param product: Product id
        @return: 7-tuple (location, auto_packing, delay, journal_id, company_id, picking_type, False) if a chained location applies, None otherwise
"""
result = None
if location.chained_location_type == 'customer':
if partner:
result = partner.property_stock_customer
elif location.chained_location_type == 'fixed':
result = location.chained_location_id
if result:
return result, location.chained_auto_packing, location.chained_delay, location.chained_journal_id and location.chained_journal_id.id or False, location.chained_company_id and location.chained_company_id.id or False, location.chained_picking_type, False
return result
def picking_type_get(self, cr, uid, from_location, to_location, context=None):
""" Gets type of picking.
@param from_location: Source location
@param to_location: Destination location
@return: Location type
"""
result = 'internal'
if (from_location.usage=='internal') and (to_location and to_location.usage in ('customer', 'supplier')):
result = 'out'
elif (from_location.usage in ('supplier', 'customer')) and (to_location.usage == 'internal'):
result = 'in'
return result
def _product_get_all_report(self, cr, uid, ids, product_ids=False, context=None):
return self._product_get_report(cr, uid, ids, product_ids, context, recursive=True)
def _product_get_report(self, cr, uid, ids, product_ids=False,
context=None, recursive=False):
""" Finds the product quantity and price for particular location.
@param product_ids: Ids of product
@param recursive: True or False
@return: Dictionary of values
"""
if context is None:
context = {}
product_obj = self.pool.get('product.product')
# Take the user company and pricetype
context['currency_id'] = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
# To be able to offer recursive or non-recursive reports we need to prevent recursive quantities by default
context['compute_child'] = False
if not product_ids:
product_ids = product_obj.search(cr, uid, [], context={'active_test': False})
products = product_obj.browse(cr, uid, product_ids, context=context)
products_by_uom = {}
products_by_id = {}
for product in products:
products_by_uom.setdefault(product.uom_id.id, [])
products_by_uom[product.uom_id.id].append(product)
products_by_id.setdefault(product.id, [])
products_by_id[product.id] = product
result = {}
result['product'] = []
for id in ids:
quantity_total = 0.0
total_price = 0.0
for uom_id in products_by_uom.keys():
fnc = self._product_get
if recursive:
fnc = self._product_all_get
ctx = context.copy()
ctx['uom'] = uom_id
qty = fnc(cr, uid, id, [x.id for x in products_by_uom[uom_id]],
context=ctx)
for product_id in qty.keys():
if not qty[product_id]:
continue
product = products_by_id[product_id]
quantity_total += qty[product_id]
# Compute based on pricetype
# Choose the right filed standard_price to read
amount_unit = product.price_get('standard_price', context=context)[product.id]
price = qty[product_id] * amount_unit
total_price += price
result['product'].append({
'price': amount_unit,
'prod_name': product.name,
'code': product.default_code, # used by lot_overview_all report!
'variants': product.variants or '',
'uom': product.uom_id.name,
'prod_qty': qty[product_id],
'price_value': price,
})
result['total'] = quantity_total
result['total_price'] = total_price
return result
def _product_get_multi_location(self, cr, uid, ids, product_ids=False, context=None,
states=['done'], what=('in', 'out')):
"""
@param product_ids: Ids of product
@param states: List of states
        @param what: Tuple of move directions to include, e.g. ('in', 'out')
        @return: Dictionary mapping product ids to available quantities
"""
product_obj = self.pool.get('product.product')
if context is None:
context = {}
context.update({
'states': states,
'what': what,
'location': ids
})
return product_obj.get_product_available(cr, uid, product_ids, context=context)
def _product_get(self, cr, uid, id, product_ids=False, context=None, states=None):
"""
        @param product_ids: Ids of products to consider (False for all)
        @param states: List of move states to include (defaults to ['done'])
        @return: Dictionary mapping product ids to available quantities
"""
if states is None:
states = ['done']
ids = id and [id] or []
return self._product_get_multi_location(cr, uid, ids, product_ids, context=context, states=states)
def _product_all_get(self, cr, uid, id, product_ids=False, context=None, states=None):
if states is None:
states = ['done']
# build the list of ids of children of the location given by id
ids = id and [id] or []
location_ids = self.search(cr, uid, [('location_id', 'child_of', ids)])
return self._product_get_multi_location(cr, uid, location_ids, product_ids, context, states)
def _product_virtual_get(self, cr, uid, id, product_ids=False, context=None, states=None):
if states is None:
states = ['done']
return self._product_all_get(cr, uid, id, product_ids, context, ['confirmed', 'waiting', 'assigned', 'done'])
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None, lock=False):
"""
Attempt to find a quantity ``product_qty`` (in the product's default uom or the uom passed in ``context``) of product ``product_id``
in locations with id ``ids`` and their child locations. If ``lock`` is True, the stock.move lines
of product with id ``product_id`` in the searched location will be write-locked using Postgres's
"FOR UPDATE NOWAIT" option until the transaction is committed or rolled back, to prevent reservin
twice the same products.
If ``lock`` is True and the lock cannot be obtained (because another transaction has locked some of
the same stock.move lines), a log line will be output and False will be returned, as if there was
not enough stock.
:param product_id: Id of product to reserve
:param product_qty: Quantity of product to reserve (in the product's default uom or the uom passed in ``context``)
:param lock: if True, the stock.move lines of product with id ``product_id`` in all locations (and children locations) with ``ids`` will
be write-locked using postgres's "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back. This is
to prevent reserving twice the same products.
:param context: optional context dictionary: if a 'uom' key is present it will be used instead of the default product uom to
compute the ``product_qty`` and in the return value.
:return: List of tuples in the form (qty, location_id) with the (partial) quantities that can be taken in each location to
reach the requested product_qty (``qty`` is expressed in the default uom of the product), of False if enough
products could not be found, or the lock could not be obtained (and ``lock`` was True).
"""
result = []
amount = 0.0
if context is None:
context = {}
uom_obj = self.pool.get('product.uom')
uom_rounding = self.pool.get('product.product').browse(cr, uid, product_id, context=context).uom_id.rounding
if context.get('uom'):
uom_rounding = uom_obj.browse(cr, uid, context.get('uom'), context=context).rounding
for id in self.search(cr, uid, [('location_id', 'child_of', ids)]):
if lock:
try:
# Must lock with a separate select query because FOR UPDATE can't be used with
# aggregation/group by's (when individual rows aren't identifiable).
# We use a SAVEPOINT to be able to rollback this part of the transaction without
# failing the whole transaction in case the LOCK cannot be acquired.
cr.execute("SAVEPOINT stock_location_product_reserve")
cr.execute("""SELECT id FROM stock_move
WHERE product_id=%s AND
(
(location_dest_id=%s AND
location_id<>%s AND
state='done')
OR
(location_id=%s AND
location_dest_id<>%s AND
state in ('done', 'assigned'))
)
FOR UPDATE of stock_move NOWAIT""", (product_id, id, id, id, id), log_exceptions=False)
except Exception:
# Here it's likely that the FOR UPDATE NOWAIT failed to get the LOCK,
# so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
# state, we return False as if the products were not available, and log it:
cr.execute("ROLLBACK TO stock_location_product_reserve")
_logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
_logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
return False
# XXX TODO: rewrite this with one single query, possibly even the quantity conversion
cr.execute("""SELECT product_uom, sum(product_qty) AS product_qty
FROM stock_move
WHERE location_dest_id=%s AND
location_id<>%s AND
product_id=%s AND
state='done'
GROUP BY product_uom
""",
(id, id, product_id))
results = cr.dictfetchall()
cr.execute("""SELECT product_uom,-sum(product_qty) AS product_qty
FROM stock_move
WHERE location_id=%s AND
location_dest_id<>%s AND
product_id=%s AND
state in ('done', 'assigned')
GROUP BY product_uom
""",
(id, id, product_id))
results += cr.dictfetchall()
total = 0.0
results2 = 0.0
for r in results:
amount = uom_obj._compute_qty(cr, uid, r['product_uom'], r['product_qty'], context.get('uom', False))
results2 += amount
total += amount
if total <= 0.0:
continue
amount = results2
compare_qty = float_compare(amount, 0, precision_rounding=uom_rounding)
if compare_qty == 1:
if amount > min(total, product_qty):
amount = min(product_qty, total)
result.append((amount, id))
product_qty -= amount
total -= amount
if product_qty <= 0.0:
return result
if total <= 0.0:
continue
return False
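    # A hedged usage sketch (identifiers invented for illustration): a caller
    # needing 5 units from a warehouse location and its children could consume
    # the (qty, location_id) tuples like this:
    #
    #     res = self.pool.get('stock.location')._product_reserve(
    #         cr, uid, [warehouse_loc_id], product_id, 5.0, context=ctx, lock=True)
    #     if res:
    #         for qty, loc_id in res:
    #             pass  # e.g. create one partial move per source location
    #     else:
    #         pass  # insufficient stock, or the row lock was not acquired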
stock_location()
class stock_tracking(osv.osv):
_name = "stock.tracking"
_description = "Packs"
def checksum(sscc):
salt = '31' * 8 + '3'
sum = 0
for sscc_part, salt_part in zip(sscc, salt):
sum += int(sscc_part) * int(salt_part)
return (10 - (sum % 10)) % 10
checksum = staticmethod(checksum)
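    # Worked example (illustrative): for the digit string "1234567" the salt
    # gives alternating weights 3,1,3,1,..., so the weighted sum is
    # 1*3 + 2*1 + 3*3 + 4*1 + 5*3 + 6*1 + 7*3 = 60 and the check digit is
    # (10 - 60 % 10) % 10 = 0, i.e. checksum("1234567") == 0.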
def make_sscc(self, cr, uid, context=None):
sequence = self.pool.get('ir.sequence').get(cr, uid, 'stock.lot.tracking')
try:
return sequence + str(self.checksum(sequence))
except Exception:
return sequence
_columns = {
'name': fields.char('Pack Reference', size=64, required=True, select=True, help="By default, the pack reference is generated following the sscc standard. (Serial number + 1 check digit)"),
'active': fields.boolean('Active', help="By unchecking the active field, you may hide a pack without deleting it."),
'serial': fields.char('Additional Reference', size=64, select=True, help="Other reference or serial number"),
'move_ids': fields.one2many('stock.move', 'tracking_id', 'Moves for this pack', readonly=True),
'date': fields.datetime('Creation Date', required=True),
}
_defaults = {
'active': 1,
'name': make_sscc,
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = self.search(cr, user, [('serial', '=', name)]+ args, limit=limit, context=context)
ids += self.search(cr, user, [('name', operator, name)]+ args, limit=limit, context=context)
return self.name_get(cr, user, ids, context)
def name_get(self, cr, uid, ids, context=None):
"""Append the serial to the name"""
if not len(ids):
return []
res = [ (r['id'], r['serial'] and '%s [%s]' % (r['name'], r['serial'])
or r['name'] )
for r in self.read(cr, uid, ids, ['name', 'serial'],
context=context) ]
return res
def unlink(self, cr, uid, ids, context=None):
raise osv.except_osv(_('Error!'), _('You cannot remove a lot line.'))
def action_traceability(self, cr, uid, ids, context=None):
""" It traces the information of a product
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return: A dictionary of values
"""
return self.pool.get('action.traceability').action_traceability(cr,uid,ids,context)
stock_tracking()
#----------------------------------------------------------
# Stock Picking
#----------------------------------------------------------
class stock_picking(osv.osv):
_name = "stock.picking"
_inherit = ['mail.thread']
_description = "Picking List"
def _set_maximum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is greater than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
for pick in self.browse(cr, uid, ids, context=context):
sql_str = """update stock_move set
date='%s'
where
picking_id=%d """ % (value, pick.id)
if pick.max_date:
sql_str += " and (date='" + pick.max_date + "' or date>'" + value + "')"
cr.execute(sql_str)
return True
def _set_minimum_date(self, cr, uid, ids, name, value, arg, context=None):
""" Calculates planned date if it is less than 'value'.
@param name: Name of field
@param value: Value of field
@param arg: User defined argument
@return: True or False
"""
if not value:
return False
if isinstance(ids, (int, long)):
ids = [ids]
for pick in self.browse(cr, uid, ids, context=context):
sql_str = """update stock_move set
date='%s'
where
picking_id=%s """ % (value, pick.id)
if pick.min_date:
sql_str += " and (date='" + pick.min_date + "' or date<'" + value + "')"
cr.execute(sql_str)
return True
def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None):
""" Finds minimum and maximum dates for picking.
@return: Dictionary of values
"""
res = {}
for id in ids:
res[id] = {'min_date': False, 'max_date': False}
if not ids:
return res
cr.execute("""select
picking_id,
min(date_expected),
max(date_expected)
from
stock_move
where
picking_id IN %s
group by
picking_id""",(tuple(ids),))
for pick, dt1, dt2 in cr.fetchall():
res[pick]['min_date'] = dt1
res[pick]['max_date'] = dt2
return res
def create(self, cr, user, vals, context=None):
if ('name' not in vals) or (vals.get('name')=='/'):
seq_obj_name = self._name
vals['name'] = self.pool.get('ir.sequence').get(cr, user, seq_obj_name)
new_id = super(stock_picking, self).create(cr, user, vals, context)
return new_id
_columns = {
'name': fields.char('Reference', size=64, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'origin': fields.char('Source Document', size=64, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Reference of the document", select=True),
'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'type': fields.selection([('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], 'Shipping Type', required=True, select=True, help="Shipping type specify, goods coming in or going out."),
'note': fields.text('Notes', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'stock_journal_id': fields.many2one('stock.journal','Stock Journal', select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
        'location_id': fields.many2one('stock.location', 'Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Keep empty if you produce at the location where the finished products are needed. " \
                "Set a location if you produce at a fixed location. This can be a partner location " \
                "if you subcontract the manufacturing operations.", select=True),
'location_dest_id': fields.many2one('stock.location', 'Dest. Location', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="Location where the system will stock the finished products.", select=True),
        'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="It specifies whether the goods are to be delivered partially or all at once"),
'state': fields.selection([
('draft', 'Draft'),
('cancel', 'Cancelled'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Transfer'),
('done', 'Transferred'),
], 'Status', readonly=True, select=True, track_visibility='onchange', help="""
* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Transfer: products reserved, simply waiting for confirmation.\n
* Transferred: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""
),
'min_date': fields.function(get_min_max_date, fnct_inv=_set_minimum_date, multi="min_max_date",
store=True, type='datetime', string='Scheduled Time', select=1, help="Scheduled time for the shipment to be processed"),
'date': fields.datetime('Time', help="Creation time, usually the time of the order.", select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'max_date': fields.function(get_min_max_date, fnct_inv=_set_maximum_date, multi="min_max_date",
store=True, type='datetime', string='Max. Expected Date', select=2),
'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}),
'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'),
'auto_picking': fields.boolean('Auto-Picking', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'partner_id': fields.many2one('res.partner', 'Partner', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
'invoice_state': fields.selection([
("invoiced", "Invoiced"),
("2binvoiced", "To Be Invoiced"),
("none", "Not Applicable")], "Invoice Control",
select=True, required=True, readonly=True, track_visibility='onchange', states={'draft': [('readonly', False)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}),
}
_defaults = {
'name': lambda self, cr, uid, context: '/',
'state': 'draft',
'move_type': 'direct',
'type': 'internal',
'invoice_state': 'none',
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c)
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Reference must be unique per Company!'),
]
def action_process(self, cr, uid, ids, context=None):
        """Open the partial picking wizard"""
        if context is None:
            context = {}
context.update({
'active_model': self._name,
'active_ids': ids,
'active_id': len(ids) and ids[0] or False
})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'stock.partial.picking',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context,
'nodestroy': True,
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
picking_obj = self.browse(cr, uid, id, context=context)
move_obj=self.pool.get('stock.move')
if ('name' not in default) or (picking_obj.name=='/'):
seq_obj_name = 'stock.picking.' + picking_obj.type
default['name'] = self.pool.get('ir.sequence').get(cr, uid, seq_obj_name)
default['origin'] = ''
default['backorder_id'] = False
if 'invoice_state' not in default and picking_obj.invoice_state == 'invoiced':
default['invoice_state'] = '2binvoiced'
res=super(stock_picking, self).copy(cr, uid, id, default, context)
if res:
picking_obj = self.browse(cr, uid, res, context=context)
for move in picking_obj.move_lines:
move_obj.write(cr, uid, [move.id], {'tracking_id': False,'prodlot_id':False, 'move_history_ids2': [(6, 0, [])], 'move_history_ids': [(6, 0, [])]})
return res
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
if view_type == 'form' and not view_id:
mod_obj = self.pool.get('ir.model.data')
if self._name == "stock.picking.in":
model,view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_in_form')
if self._name == "stock.picking.out":
model,view_id = mod_obj.get_object_reference(cr, uid, 'stock', 'view_picking_out_form')
return super(stock_picking,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
return {}
def action_explode(self, cr, uid, moves, context=None):
"""Hook to allow other modules to split the moves of a picking."""
return moves
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms picking.
@return: True
"""
pickings = self.browse(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'state': 'confirmed'})
todo = []
for picking in pickings:
for r in picking.move_lines:
if r.state == 'draft':
todo.append(r.id)
todo = self.action_explode(cr, uid, todo, context)
if len(todo):
self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context)
return True
def test_auto_picking(self, cr, uid, ids):
# TODO: Check locations to see if in the same location ?
return True
def action_assign(self, cr, uid, ids, *args):
""" Changes state of picking to available if all moves are confirmed.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
if pick.state == 'draft':
wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_confirm', cr)
move_ids = [x.id for x in pick.move_lines if x.state == 'confirmed']
if not move_ids:
raise osv.except_osv(_('Warning!'),_('Not enough stock, unable to reserve the products.'))
self.pool.get('stock.move').action_assign(cr, uid, move_ids)
return True
def force_assign(self, cr, uid, ids, *args):
""" Changes state of picking to available if moves are confirmed or waiting.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed','waiting']]
self.pool.get('stock.move').force_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
def draft_force_assign(self, cr, uid, ids, *args):
""" Confirms picking directly from draft state.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
if not pick.move_lines:
raise osv.except_osv(_('Error!'),_('You cannot process picking without stock moves.'))
wf_service.trg_validate(uid, 'stock.picking', pick.id,
'button_confirm', cr)
return True
def draft_validate(self, cr, uid, ids, context=None):
""" Validates picking directly from draft state.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
self.draft_force_assign(cr, uid, ids)
for pick in self.browse(cr, uid, ids, context=context):
move_ids = [x.id for x in pick.move_lines]
self.pool.get('stock.move').force_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return self.action_process(
cr, uid, ids, context=context)
def cancel_assign(self, cr, uid, ids, *args):
""" Cancels picking and moves.
@return: True
"""
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids):
move_ids = [x.id for x in pick.move_lines]
self.pool.get('stock.move').cancel_assign(cr, uid, move_ids)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
return True
def action_assign_wkf(self, cr, uid, ids, context=None):
""" Changes picking state to assigned.
@return: True
"""
self.write(cr, uid, ids, {'state': 'assigned'})
return True
def test_finished(self, cr, uid, ids):
""" Tests whether the move is in done or cancel state or not.
@return: True or False
"""
move_ids = self.pool.get('stock.move').search(cr, uid, [('picking_id', 'in', ids)])
for move in self.pool.get('stock.move').browse(cr, uid, move_ids):
if move.state not in ('done', 'cancel'):
if move.product_qty != 0.0:
return False
                else:
                    # zero-quantity moves are simply forced to done; call
                    # write() on the model, as browse records in this API
                    # version do not expose a write() method themselves
                    self.pool.get('stock.move').write(cr, uid, [move.id], {'state': 'done'})
return True
def test_assigned(self, cr, uid, ids):
""" Tests whether the move is in assigned state or not.
@return: True or False
"""
        #TOFIX: assignment of move lines should be called before testing assignment, otherwise the picking never reaches the 'assigned' state
ok = True
for pick in self.browse(cr, uid, ids):
mt = pick.move_type
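            # move_type semantics (assumed from the selection field on the
            # picking): 'direct' allows partial processing as soon as some
            # moves are reserved, while 'one' waits until every move of the
            # picking is available.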
            # incoming shipments are always set as available if they aren't chained
if pick.type == 'in':
if all([x.state != 'waiting' for x in pick.move_lines]):
return True
for move in pick.move_lines:
if (move.state in ('confirmed', 'draft')) and (mt == 'one'):
return False
if (mt == 'direct') and (move.state == 'assigned') and (move.product_qty):
return True
ok = ok and (move.state in ('cancel', 'done', 'assigned'))
return ok
def action_cancel(self, cr, uid, ids, context=None):
""" Changes picking state to cancel.
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
ids2 = [move.id for move in pick.move_lines]
self.pool.get('stock.move').action_cancel(cr, uid, ids2, context)
self.write(cr, uid, ids, {'state': 'cancel', 'invoice_state': 'none'})
return True
#
# TODO: change and create a move if not parents
#
def action_done(self, cr, uid, ids, context=None):
"""Changes picking state to done.
This method is called at the end of the workflow by the activity "done".
@return: True
"""
self.write(cr, uid, ids, {'state': 'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
return True
def action_move(self, cr, uid, ids, context=None):
"""Process the Stock Moves of the Picking
This method is called by the workflow by the activity "move".
Normally that happens when the signal button_done is received (button
"Done" pressed on a Picking view).
@return: True
"""
for pick in self.browse(cr, uid, ids, context=context):
todo = []
for move in pick.move_lines:
if move.state == 'draft':
self.pool.get('stock.move').action_confirm(cr, uid, [move.id],
context=context)
todo.append(move.id)
elif move.state in ('assigned','confirmed'):
todo.append(move.id)
if len(todo):
self.pool.get('stock.move').action_done(cr, uid, todo,
context=context)
return True
def get_currency_id(self, cr, uid, picking):
return False
def _get_partner_to_invoice(self, cr, uid, picking, context=None):
""" Gets the partner that will be invoiced
Note that this function is inherited in the sale and purchase modules
@param picking: object of the picking for which we are selecting the partner to invoice
@return: object of the partner to invoice
"""
return picking.partner_id and picking.partner_id.id
def _get_comment_invoice(self, cr, uid, picking):
"""
@return: comment string for invoice
"""
return picking.note or ''
def _get_price_unit_invoice(self, cr, uid, move_line, type, context=None):
""" Gets price unit for invoice
@param move_line: Stock move lines
@param type: Type of invoice
@return: The price unit for the move line
"""
if context is None:
context = {}
if type in ('in_invoice', 'in_refund'):
# Take the user company and pricetype
context['currency_id'] = move_line.company_id.currency_id.id
amount_unit = move_line.product_id.price_get('standard_price', context=context)[move_line.product_id.id]
return amount_unit
else:
return move_line.product_id.list_price
def _get_discount_invoice(self, cr, uid, move_line):
'''Return the discount for the move line'''
return 0.0
def _get_taxes_invoice(self, cr, uid, move_line, type):
""" Gets taxes on invoice
@param move_line: Stock move lines
@param type: Type of invoice
@return: Taxes Ids for the move line
"""
if type in ('in_invoice', 'in_refund'):
taxes = move_line.product_id.supplier_taxes_id
else:
taxes = move_line.product_id.taxes_id
if move_line.picking_id and move_line.picking_id.partner_id and move_line.picking_id.partner_id.id:
return self.pool.get('account.fiscal.position').map_tax(
cr,
uid,
move_line.picking_id.partner_id.property_account_position,
taxes
)
else:
return map(lambda x: x.id, taxes)
def _get_account_analytic_invoice(self, cr, uid, picking, move_line):
return False
def _invoice_line_hook(self, cr, uid, move_line, invoice_line_id):
'''Call after the creation of the invoice line'''
return
def _invoice_hook(self, cr, uid, picking, invoice_id):
'''Call after the creation of the invoice'''
return
def _get_invoice_type(self, pick):
src_usage = dest_usage = None
inv_type = None
if pick.invoice_state == '2binvoiced':
if pick.move_lines:
src_usage = pick.move_lines[0].location_id.usage
dest_usage = pick.move_lines[0].location_dest_id.usage
if pick.type == 'out' and dest_usage == 'supplier':
inv_type = 'in_refund'
elif pick.type == 'out' and dest_usage == 'customer':
inv_type = 'out_invoice'
elif pick.type == 'in' and src_usage == 'supplier':
inv_type = 'in_invoice'
elif pick.type == 'in' and src_usage == 'customer':
inv_type = 'out_refund'
else:
inv_type = 'out_invoice'
return inv_type
def _prepare_invoice_group(self, cr, uid, picking, partner, invoice, context=None):
""" Builds the dict for grouped invoices
@param picking: picking object
        @param partner: object of the partner to invoice (not used here, but may be useful if this function is inherited)
@param invoice: object of the invoice that we are updating
@return: dict that will be used to update the invoice
"""
comment = self._get_comment_invoice(cr, uid, picking)
return {
'name': (invoice.name or '') + ', ' + (picking.name or ''),
'origin': (invoice.origin or '') + ', ' + (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
'date_invoice': context.get('date_inv', False),
'user_id': uid,
}
def _prepare_invoice(self, cr, uid, picking, partner, inv_type, journal_id, context=None):
""" Builds the dict containing the values for the invoice
@param picking: picking object
@param partner: object of the partner to invoice
@param inv_type: type of the invoice ('out_invoice', 'in_invoice', ...)
@param journal_id: ID of the accounting journal
@return: dict that will be used to create the invoice object
"""
if isinstance(partner, int):
partner = self.pool.get('res.partner').browse(cr, uid, partner, context=context)
if inv_type in ('out_invoice', 'out_refund'):
account_id = partner.property_account_receivable.id
payment_term = partner.property_payment_term.id or False
else:
account_id = partner.property_account_payable.id
payment_term = partner.property_supplier_payment_term.id or False
comment = self._get_comment_invoice(cr, uid, picking)
invoice_vals = {
'name': picking.name,
'origin': (picking.name or '') + (picking.origin and (':' + picking.origin) or ''),
'type': inv_type,
'account_id': account_id,
'partner_id': partner.id,
'comment': comment,
'payment_term': payment_term,
'fiscal_position': partner.property_account_position.id,
'date_invoice': context.get('date_inv', False),
'company_id': picking.company_id.id,
'user_id': uid,
}
cur_id = self.get_currency_id(cr, uid, picking)
if cur_id:
invoice_vals['currency_id'] = cur_id
if journal_id:
invoice_vals['journal_id'] = journal_id
return invoice_vals
def _prepare_invoice_line(self, cr, uid, group, picking, move_line, invoice_id,
invoice_vals, context=None):
""" Builds the dict containing the values for the invoice line
@param group: True or False
@param picking: picking object
@param: move_line: move_line object
@param: invoice_id: ID of the related invoice
@param: invoice_vals: dict used to created the invoice
@return: dict that will be used to create the invoice line
"""
if group:
name = (picking.name or '') + '-' + move_line.name
else:
name = move_line.name
origin = move_line.picking_id.name or ''
if move_line.picking_id.origin:
origin += ':' + move_line.picking_id.origin
if invoice_vals['type'] in ('out_invoice', 'out_refund'):
account_id = move_line.product_id.property_account_income.id
if not account_id:
account_id = move_line.product_id.categ_id.\
property_account_income_categ.id
else:
account_id = move_line.product_id.property_account_expense.id
if not account_id:
account_id = move_line.product_id.categ_id.\
property_account_expense_categ.id
if invoice_vals['fiscal_position']:
fp_obj = self.pool.get('account.fiscal.position')
fiscal_position = fp_obj.browse(cr, uid, invoice_vals['fiscal_position'], context=context)
account_id = fp_obj.map_account(cr, uid, fiscal_position, account_id)
# set UoS if it's a sale and the picking doesn't have one
uos_id = move_line.product_uos and move_line.product_uos.id or False
if not uos_id and invoice_vals['type'] in ('out_invoice', 'out_refund'):
uos_id = move_line.product_uom.id
return {
'name': name,
'origin': origin,
'invoice_id': invoice_id,
'uos_id': uos_id,
'product_id': move_line.product_id.id,
'account_id': account_id,
'price_unit': self._get_price_unit_invoice(cr, uid, move_line, invoice_vals['type']),
'discount': self._get_discount_invoice(cr, uid, move_line),
'quantity': move_line.product_qty,
'invoice_line_tax_id': [(6, 0, self._get_taxes_invoice(cr, uid, move_line, invoice_vals['type']))],
'account_analytic_id': self._get_account_analytic_invoice(cr, uid, picking, move_line),
}
def action_invoice_create(self, cr, uid, ids, journal_id=False,
group=False, type='out_invoice', context=None):
""" Creates invoice based on the invoice state selected for picking.
@param journal_id: Id of journal
@param group: Whether to create a group invoice or not
@param type: Type invoice to be created
@return: Ids of created invoices for the pickings
"""
if context is None:
context = {}
invoice_obj = self.pool.get('account.invoice')
invoice_line_obj = self.pool.get('account.invoice.line')
partner_obj = self.pool.get('res.partner')
invoices_group = {}
res = {}
inv_type = type
for picking in self.browse(cr, uid, ids, context=context):
if picking.invoice_state != '2binvoiced':
continue
partner = self._get_partner_to_invoice(cr, uid, picking, context=context)
if isinstance(partner, int):
partner = partner_obj.browse(cr, uid, [partner], context=context)[0]
if not partner:
                raise osv.except_osv(_('Error, no partner!'),
                    _('Please set a partner on the picking list if you want to generate an invoice.'))
if not inv_type:
inv_type = self._get_invoice_type(picking)
if group and partner.id in invoices_group:
invoice_id = invoices_group[partner.id]
invoice = invoice_obj.browse(cr, uid, invoice_id)
invoice_vals_group = self._prepare_invoice_group(cr, uid, picking, partner, invoice, context=context)
invoice_obj.write(cr, uid, [invoice_id], invoice_vals_group, context=context)
else:
invoice_vals = self._prepare_invoice(cr, uid, picking, partner, inv_type, journal_id, context=context)
invoice_id = invoice_obj.create(cr, uid, invoice_vals, context=context)
invoices_group[partner.id] = invoice_id
res[picking.id] = invoice_id
for move_line in picking.move_lines:
if move_line.state == 'cancel':
continue
if move_line.scrapped:
# do no invoice scrapped products
continue
vals = self._prepare_invoice_line(cr, uid, group, picking, move_line,
invoice_id, invoice_vals, context=context)
if vals:
invoice_line_id = invoice_line_obj.create(cr, uid, vals, context=context)
self._invoice_line_hook(cr, uid, move_line, invoice_line_id)
invoice_obj.button_compute(cr, uid, [invoice_id], context=context,
set_total=(inv_type in ('in_invoice', 'in_refund')))
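            # set_total only applies to supplier invoices/refunds, where it is
            # presumably used to prefill the verification total (check_total)
            # on the created invoice -- an assumption about button_compute().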
self.write(cr, uid, [picking.id], {
'invoice_state': 'invoiced',
}, context=context)
self._invoice_hook(cr, uid, picking, invoice_id)
self.write(cr, uid, res.keys(), {
'invoice_state': 'invoiced',
}, context=context)
return res
def test_done(self, cr, uid, ids, context=None):
""" Test whether the move lines are done or not.
@return: True or False
"""
ok = False
for pick in self.browse(cr, uid, ids, context=context):
if not pick.move_lines:
return True
for move in pick.move_lines:
if move.state not in ('cancel','done'):
return False
if move.state=='done':
ok = True
return ok
def test_cancel(self, cr, uid, ids, context=None):
""" Test whether the move lines are canceled or not.
@return: True or False
"""
for pick in self.browse(cr, uid, ids, context=context):
for move in pick.move_lines:
if move.state not in ('cancel',):
return False
return True
def allow_cancel(self, cr, uid, ids, context=None):
for pick in self.browse(cr, uid, ids, context=context):
if not pick.move_lines:
return True
for move in pick.move_lines:
if move.state == 'done':
raise osv.except_osv(_('Error!'), _('You cannot cancel the picking as some moves have been done. You should cancel the picking lines.'))
return True
def unlink(self, cr, uid, ids, context=None):
move_obj = self.pool.get('stock.move')
if context is None:
context = {}
for pick in self.browse(cr, uid, ids, context=context):
if pick.state in ['done','cancel']:
raise osv.except_osv(_('Error!'), _('You cannot remove the picking which is in %s state!')%(pick.state,))
else:
ids2 = [move.id for move in pick.move_lines]
ctx = context.copy()
ctx.update({'call_unlink':True})
if pick.state != 'draft':
#Cancelling the move in order to affect Virtual stock of product
move_obj.action_cancel(cr, uid, ids2, ctx)
#Removing the move
move_obj.unlink(cr, uid, ids2, ctx)
return super(stock_picking, self).unlink(cr, uid, ids, context=context)
# FIXME: needs refactoring, this code is partially duplicated in stock_move.do_partial()!
def do_partial(self, cr, uid, ids, partial_datas, context=None):
""" Makes partial picking and moves done.
@param partial_datas : Dictionary containing details of partial picking
                          like partner_id, delivery_date,
delivery moves with product_id, product_qty, uom
@return: Dictionary of values
"""
if context is None:
context = {}
else:
context = dict(context)
res = {}
move_obj = self.pool.get('stock.move')
product_obj = self.pool.get('product.product')
currency_obj = self.pool.get('res.currency')
uom_obj = self.pool.get('product.uom')
sequence_obj = self.pool.get('ir.sequence')
wf_service = netsvc.LocalService("workflow")
for pick in self.browse(cr, uid, ids, context=context):
new_picking = None
complete, too_many, too_few = [], [], []
move_product_qty, prodlot_ids, product_avail, partial_qty, product_uoms = {}, {}, {}, {}, {}
for move in pick.move_lines:
if move.state in ('done', 'cancel'):
continue
partial_data = partial_datas.get('move%s'%(move.id), {})
product_qty = partial_data.get('product_qty',0.0)
move_product_qty[move.id] = product_qty
product_uom = partial_data.get('product_uom',False)
product_price = partial_data.get('product_price',0.0)
product_currency = partial_data.get('product_currency',False)
prodlot_id = partial_data.get('prodlot_id')
prodlot_ids[move.id] = prodlot_id
product_uoms[move.id] = product_uom
partial_qty[move.id] = uom_obj._compute_qty(cr, uid, product_uoms[move.id], product_qty, move.product_uom.id)
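                # partial_qty is the processed quantity converted back into
                # the move's own unit of measure, so it can be compared with
                # move.product_qty just below to classify the move.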
if move.product_qty == partial_qty[move.id]:
complete.append(move)
elif move.product_qty > partial_qty[move.id]:
too_few.append(move)
else:
too_many.append(move)
# Average price computation
if (pick.type == 'in') and (move.product_id.cost_method == 'average'):
product = product_obj.browse(cr, uid, move.product_id.id)
move_currency_id = move.company_id.currency_id.id
context['currency_id'] = move_currency_id
qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)
if product.id in product_avail:
product_avail[product.id] += qty
else:
product_avail[product.id] = product.qty_available
if qty > 0:
new_price = currency_obj.compute(cr, uid, product_currency,
move_currency_id, product_price)
new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
product.uom_id.id)
if product.qty_available <= 0:
new_std_price = new_price
else:
# Get the standard price
amount_unit = product.price_get('standard_price', context=context)[product.id]
new_std_price = ((amount_unit * product_avail[product.id])\
+ (new_price * qty))/(product_avail[product.id] + qty)
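                        # i.e. the new standard price is the quantity-weighted
                        # average of the previous valuation and the incoming
                        # units, expressed in the company currency.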
# Write the field according to price type field
product_obj.write(cr, uid, [product.id], {'standard_price': new_std_price})
# Record the values that were chosen in the wizard, so they can be
# used for inventory valuation if real-time valuation is enabled.
move_obj.write(cr, uid, [move.id],
{'price_unit': product_price,
'price_currency_id': product_currency})
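            # Split logic (sketch of what follows): moves processed for less
            # than the planned quantity put the processed part in a fresh copy
            # of the picking, which takes over the original name and is
            # completed now, while the remaining quantities stay on the
            # original record, renamed with a new sequence number and linked
            # as the back order.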
for move in too_few:
product_qty = move_product_qty[move.id]
if not new_picking:
new_picking_name = pick.name
self.write(cr, uid, [pick.id],
{'name': sequence_obj.get(cr, uid,
'stock.picking.%s'%(pick.type)),
})
new_picking = self.copy(cr, uid, pick.id,
{
'name': new_picking_name,
'move_lines' : [],
'state':'draft',
})
if product_qty != 0:
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty, #TODO: put correct uos_qty
'picking_id' : new_picking,
'state': 'assigned',
'move_dest_id': False,
'price_unit': move.price_unit,
'product_uom': product_uoms[move.id]
}
prodlot_id = prodlot_ids[move.id]
if prodlot_id:
defaults.update(prodlot_id=prodlot_id)
move_obj.copy(cr, uid, move.id, defaults)
move_obj.write(cr, uid, [move.id],
{
'product_qty': move.product_qty - partial_qty[move.id],
'product_uos_qty': move.product_qty - partial_qty[move.id], #TODO: put correct uos_qty
'prodlot_id': False,
'tracking_id': False,
})
if new_picking:
move_obj.write(cr, uid, [c.id for c in complete], {'picking_id': new_picking})
for move in complete:
defaults = {'product_uom': product_uoms[move.id], 'product_qty': move_product_qty[move.id]}
if prodlot_ids.get(move.id):
defaults.update({'prodlot_id': prodlot_ids[move.id]})
move_obj.write(cr, uid, [move.id], defaults)
for move in too_many:
product_qty = move_product_qty[move.id]
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty, #TODO: put correct uos_qty
'product_uom': product_uoms[move.id]
}
prodlot_id = prodlot_ids.get(move.id)
if prodlot_ids.get(move.id):
defaults.update(prodlot_id=prodlot_id)
if new_picking:
defaults.update(picking_id=new_picking)
move_obj.write(cr, uid, [move.id], defaults)
# At first we confirm the new picking (if necessary)
if new_picking:
wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_confirm', cr)
                # Then we finish the picking that is actually processed now
self.write(cr, uid, [pick.id], {'backorder_id': new_picking})
self.action_move(cr, uid, [new_picking], context=context)
wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_done', cr)
wf_service.trg_write(uid, 'stock.picking', pick.id, cr)
delivered_pack_id = new_picking
back_order_name = self.browse(cr, uid, delivered_pack_id, context=context).name
self.message_post(cr, uid, ids, body=_("Back order <em>%s</em> has been <b>created</b>.") % (back_order_name), context=context)
else:
self.action_move(cr, uid, [pick.id], context=context)
wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_done', cr)
delivered_pack_id = pick.id
delivered_pack = self.browse(cr, uid, delivered_pack_id, context=context)
res[pick.id] = {'delivered_picking': delivered_pack.id or False}
return res
# views associated to each picking type
_VIEW_LIST = {
'out': 'view_picking_out_form',
'in': 'view_picking_in_form',
'internal': 'view_picking_form',
}
def _get_view_id(self, cr, uid, type):
"""Get the view id suiting the given type
@param type: the picking type as a string
        @return: view id, or False if no view found
"""
res = self.pool.get('ir.model.data').get_object_reference(cr, uid,
'stock', self._VIEW_LIST.get(type, 'view_picking_form'))
return res and res[1] or False
class stock_production_lot(osv.osv):
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
reads = self.read(cr, uid, ids, ['name', 'prefix', 'ref'], context)
res = []
for record in reads:
name = record['name']
prefix = record['prefix']
if prefix:
name = prefix + '/' + name
if record['ref']:
name = '%s [%s]' % (name, record['ref'])
res.append((record['id'], name))
return res
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
args = args or []
ids = []
if name:
ids = self.search(cr, uid, [('prefix', '=', name)] + args, limit=limit, context=context)
if not ids:
ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
else:
ids = self.search(cr, uid, args, limit=limit, context=context)
return self.name_get(cr, uid, ids, context)
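    # Example (sketch): name_search('LOT01') first matches lots whose prefix
    # equals 'LOT01' exactly, and only falls back to an ilike match on the
    # name when no prefix matches.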
_name = 'stock.production.lot'
_description = 'Serial Number'
def _get_stock(self, cr, uid, ids, field_name, arg, context=None):
""" Gets stock of products for locations
@return: Dictionary of values
"""
if context is None:
context = {}
if 'location_id' not in context:
locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')], context=context)
else:
locations = context['location_id'] and [context['location_id']] or []
if isinstance(ids, (int, long)):
ids = [ids]
res = {}.fromkeys(ids, 0.0)
if locations:
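            # stock_report_prodlots is assumed to be the SQL view shipped
            # with this module that aggregates quantities per
            # (prodlot, location) pair.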
cr.execute('''select
prodlot_id,
sum(qty)
from
stock_report_prodlots
where
location_id IN %s and prodlot_id IN %s group by prodlot_id''',(tuple(locations),tuple(ids),))
res.update(dict(cr.fetchall()))
return res
def _stock_search(self, cr, uid, obj, name, args, context=None):
""" Searches Ids of products
@return: Ids of locations
"""
locations = self.pool.get('stock.location').search(cr, uid, [('usage', '=', 'internal')])
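        # Caution: the operator and value taken from the search domain are
        # concatenated directly into the SQL string below; this is only safe
        # as long as callers pass ORM-validated operators and numeric values.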
cr.execute('''select
prodlot_id,
sum(qty)
from
stock_report_prodlots
where
location_id IN %s group by prodlot_id
having sum(qty) '''+ str(args[0][1]) + str(args[0][2]),(tuple(locations),))
res = cr.fetchall()
ids = [('id', 'in', map(lambda x: x[0], res))]
return ids
_columns = {
'name': fields.char('Serial Number', size=64, required=True, help="Unique Serial Number, will be displayed as: PREFIX/SERIAL [INT_REF]"),
'ref': fields.char('Internal Reference', size=256, help="Internal reference number in case it differs from the manufacturer's serial number"),
'prefix': fields.char('Prefix', size=64, help="Optional prefix to prepend when displaying this serial number: PREFIX/SERIAL [INT_REF]"),
'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]),
'date': fields.datetime('Creation Date', required=True),
'stock_available': fields.function(_get_stock, fnct_search=_stock_search, type="float", string="Available", select=True,
help="Current quantity of products with this Serial Number available in company warehouses",
digits_compute=dp.get_precision('Product Unit of Measure')),
'revisions': fields.one2many('stock.production.lot.revision', 'lot_id', 'Revisions'),
'company_id': fields.many2one('res.company', 'Company', select=True),
'move_ids': fields.one2many('stock.move', 'prodlot_id', 'Moves for this serial number', readonly=True),
}
_defaults = {
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
'product_id': lambda x, y, z, c: c.get('product_id', False),
}
_sql_constraints = [
('name_ref_uniq', 'unique (name, ref)', 'The combination of Serial Number and internal reference must be unique !'),
]
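    # Note: with a NULL internal reference, PostgreSQL treats rows as
    # distinct for this UNIQUE constraint, so duplicate serial names without
    # a ref are not actually blocked by it.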
def action_traceability(self, cr, uid, ids, context=None):
""" It traces the information of a product
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return: A dictionary of values
"""
value=self.pool.get('action.traceability').action_traceability(cr,uid,ids,context)
return value
def copy(self, cr, uid, id, default=None, context=None):
context = context or {}
default = default and default.copy() or {}
default.update(date=time.strftime('%Y-%m-%d %H:%M:%S'), move_ids=[])
return super(stock_production_lot, self).copy(cr, uid, id, default=default, context=context)
stock_production_lot()
class stock_production_lot_revision(osv.osv):
_name = 'stock.production.lot.revision'
_description = 'Serial Number Revision'
_columns = {
'name': fields.char('Revision Name', size=64, required=True),
'description': fields.text('Description'),
'date': fields.date('Revision Date'),
'indice': fields.char('Revision Number', size=16),
'author_id': fields.many2one('res.users', 'Author'),
'lot_id': fields.many2one('stock.production.lot', 'Serial Number', select=True, ondelete='cascade'),
'company_id': fields.related('lot_id','company_id',type='many2one',relation='res.company',string='Company', store=True, readonly=True),
}
_defaults = {
'author_id': lambda x, y, z, c: z,
'date': fields.date.context_today,
}
stock_production_lot_revision()
# ----------------------------------------------------
# Move
# ----------------------------------------------------
#
# Fields:
# location_dest_id is only used for predicting future stocks
#
class stock_move(osv.osv):
def _getSSCC(self, cr, uid, context=None):
cr.execute('select id from stock_tracking where create_uid=%s order by id desc limit 1', (uid,))
res = cr.fetchone()
return (res and res[0]) or False
_name = "stock.move"
_description = "Stock Move"
_order = 'date_expected desc, id'
_log_create = False
def action_partial_move(self, cr, uid, ids, context=None):
if context is None: context = {}
if context.get('active_model') != self._name:
context.update(active_ids=ids, active_model=self._name)
partial_id = self.pool.get("stock.partial.move").create(
cr, uid, {}, context=context)
return {
'name':_("Products to Process"),
'view_mode': 'form',
'view_id': False,
'view_type': 'form',
'res_model': 'stock.partial.move',
'res_id': partial_id,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'domain': '[]',
'context': context
}
def name_get(self, cr, uid, ids, context=None):
res = []
for line in self.browse(cr, uid, ids, context=context):
name = line.location_id.name+' > '+line.location_dest_id.name
# optional prefixes
if line.product_id.code:
name = line.product_id.code + ': ' + name
if line.picking_id.origin:
name = line.picking_id.origin + '/ ' + name
res.append((line.id, name))
return res
def _check_tracking(self, cr, uid, ids, context=None):
""" Checks if serial number is assigned to stock move or not.
@return: True or False
"""
for move in self.browse(cr, uid, ids, context=context):
if not move.prodlot_id and \
(move.state == 'done' and \
( \
(move.product_id.track_production and move.location_id.usage == 'production') or \
(move.product_id.track_production and move.location_dest_id.usage == 'production') or \
(move.product_id.track_incoming and move.location_id.usage == 'supplier') or \
(move.product_id.track_outgoing and move.location_dest_id.usage == 'customer') or \
(move.product_id.track_incoming and move.location_id.usage == 'inventory') \
)):
return False
return True
def _check_product_lot(self, cr, uid, ids, context=None):
""" Checks whether move is done or not and production lot is assigned to that move.
@return: True or False
"""
for move in self.browse(cr, uid, ids, context=context):
if move.prodlot_id and move.state == 'done' and (move.prodlot_id.product_id.id != move.product_id.id):
return False
return True
_columns = {
'name': fields.char('Description', required=True, select=True),
'priority': fields.selection([('0', 'Not urgent'), ('1', 'Urgent')], 'Priority'),
'create_date': fields.datetime('Creation Date', readonly=True, select=True),
'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}),
'date_expected': fields.datetime('Scheduled Date', states={'done': [('readonly', True)]},required=True, select=True, help="Scheduled date for the processing of this move"),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type','<>','service')],states={'done': [('readonly', True)]}),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
required=True,states={'done': [('readonly', True)]},
help="This is the quantity of products from an inventory "
"point of view. For moves in the state 'done', this is the "
"quantity of products that were actually moved. For other "
"moves, this is the quantity of product that is planned to "
"be moved. Lowering this quantity does not generate a "
"backorder. Changing this quantity on assigned moves affects "
"the product reservation, and should be done with care."
),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True,states={'done': [('readonly', True)]}),
'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product Unit of Measure'), states={'done': [('readonly', True)]}),
'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}),
'product_packaging': fields.many2one('product.packaging', 'Packaging', help="It specifies attributes of packaging like type, quantity of packaging,etc."),
'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True,states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."),
'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True,states={'done': [('readonly', True)]}, select=True, help="Location where the system will stock the finished products."),
'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"),
        'prodlot_id': fields.many2one('stock.production.lot', 'Serial Number', states={'done': [('readonly', True)]}, help="Serial number used to track the products of this move", select=True),
'tracking_id': fields.many2one('stock.tracking', 'Pack', select=True, states={'done': [('readonly', True)]}, help="Logistical shipping unit: pallet, box, pack ..."),
'auto_validate': fields.boolean('Auto Validate'),
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True),
'move_history_ids': fields.many2many('stock.move', 'stock_move_history_ids', 'parent_id', 'child_id', 'Move History (child moves)'),
'move_history_ids2': fields.many2many('stock.move', 'stock_move_history_ids', 'child_id', 'parent_id', 'Move History (parent moves)'),
'picking_id': fields.many2one('stock.picking', 'Reference', select=True,states={'done': [('readonly', True)]}),
'note': fields.text('Notes'),
'state': fields.selection([('draft', 'New'),
('cancel', 'Cancelled'),
('waiting', 'Waiting Another Move'),
('confirmed', 'Waiting Availability'),
('assigned', 'Available'),
('done', 'Done'),
], 'Status', readonly=True, select=True,
help= "* New: When the stock move is created and not yet confirmed.\n"\
"* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\
"* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\
"* Available: When products are reserved, it is set to \'Available\'.\n"\
"* Done: When the shipment is processed, the state is \'Done\'."),
'price_unit': fields.float('Unit Price', digits_compute= dp.get_precision('Account'), help="Technical field used to record the product cost set by the user during a picking confirmation (when average price costing method is used)"),
'price_currency_id': fields.many2one('res.currency', 'Currency for average price', help="Technical field used to record the currency chosen by the user during a picking confirmation (when average price costing method is used)"),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'backorder_id': fields.related('picking_id','backorder_id',type='many2one', relation="stock.picking", string="Back Order of", select=True),
'origin': fields.related('picking_id','origin',type='char', size=64, relation="stock.picking", string="Source", store=True),
# used for colors in tree views:
'scrapped': fields.related('location_dest_id','scrap_location',type='boolean',relation='stock.location',string='Scrapped', readonly=True),
'type': fields.related('picking_id', 'type', type='selection', selection=[('out', 'Sending Goods'), ('in', 'Getting Goods'), ('internal', 'Internal')], string='Shipping Type'),
}
def _check_location(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if (record.state=='done') and (record.location_id.usage == 'view'):
raise osv.except_osv(_('Error'), _('You cannot move product %s from a location of type view %s.')% (record.product_id.name, record.location_id.name))
if (record.state=='done') and (record.location_dest_id.usage == 'view' ):
raise osv.except_osv(_('Error'), _('You cannot move product %s to a location of type view %s.')% (record.product_id.name, record.location_dest_id.name))
return True
_constraints = [
(_check_tracking,
'You must assign a serial number for this product.',
['prodlot_id']),
(_check_location, 'You cannot move products from or to a location of the type view.',
['location_id','location_dest_id']),
(_check_product_lot,
         'You are trying to assign a lot which does not belong to the same product.',
['prodlot_id'])]
def _default_location_destination(self, cr, uid, context=None):
""" Gets default address of partner for destination location
@return: Address id or False
"""
mod_obj = self.pool.get('ir.model.data')
picking_type = context.get('picking_type')
location_id = False
if context is None:
context = {}
if context.get('move_line', []):
if context['move_line'][0]:
if isinstance(context['move_line'][0], (tuple, list)):
location_id = context['move_line'][0][2] and context['move_line'][0][2].get('location_dest_id',False)
else:
move_list = self.pool.get('stock.move').read(cr, uid, context['move_line'][0], ['location_dest_id'])
location_id = move_list and move_list['location_dest_id'][0] or False
elif context.get('address_out_id', False):
property_out = self.pool.get('res.partner').browse(cr, uid, context['address_out_id'], context).property_stock_customer
location_id = property_out and property_out.id or False
else:
location_xml_id = False
if picking_type in ('in', 'internal'):
location_xml_id = 'stock_location_stock'
elif picking_type == 'out':
location_xml_id = 'stock_location_customers'
if location_xml_id:
location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
return location_id
def _default_location_source(self, cr, uid, context=None):
""" Gets default address of partner for source location
@return: Address id or False
"""
mod_obj = self.pool.get('ir.model.data')
picking_type = context.get('picking_type')
location_id = False
if context is None:
context = {}
if context.get('move_line', []):
try:
location_id = context['move_line'][0][2]['location_id']
            except (IndexError, KeyError, TypeError):
                pass
elif context.get('address_in_id', False):
part_obj_add = self.pool.get('res.partner').browse(cr, uid, context['address_in_id'], context=context)
if part_obj_add:
location_id = part_obj_add.property_stock_supplier.id
else:
location_xml_id = False
if picking_type == 'in':
location_xml_id = 'stock_location_suppliers'
elif picking_type in ('out', 'internal'):
location_xml_id = 'stock_location_stock'
if location_xml_id:
location_model, location_id = mod_obj.get_object_reference(cr, uid, 'stock', location_xml_id)
return location_id
def _default_destination_address(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
return user.company_id.partner_id.id
def _default_move_type(self, cr, uid, context=None):
""" Gets default type of move
@return: type
"""
if context is None:
context = {}
picking_type = context.get('picking_type')
type = 'internal'
if picking_type == 'in':
type = 'in'
elif picking_type == 'out':
type = 'out'
return type
_defaults = {
'location_id': _default_location_source,
'location_dest_id': _default_location_destination,
'partner_id': _default_destination_address,
'type': _default_move_type,
'state': 'draft',
'priority': '1',
'product_qty': 1.0,
'scrapped' : False,
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c),
'date_expected': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
if uid != 1:
frozen_fields = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
if frozen_fields.intersection(vals):
raise osv.except_osv(_('Operation forbidden !'),
_('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
return super(stock_move, self).write(cr, uid, ids, vals, context=context)
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
default.update({'move_history_ids2': [], 'move_history_ids': []})
return super(stock_move, self).copy(cr, uid, id, default, context=context)
def _auto_init(self, cursor, context=None):
res = super(stock_move, self)._auto_init(cursor, context=context)
cursor.execute('SELECT indexname \
FROM pg_indexes \
WHERE indexname = \'stock_move_location_id_location_dest_id_product_id_state\'')
if not cursor.fetchone():
cursor.execute('CREATE INDEX stock_move_location_id_location_dest_id_product_id_state \
ON stock_move (product_id, state, location_id, location_dest_id)')
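        # The column order (product_id, state, location_id, location_dest_id)
        # is presumably chosen to serve the availability queries that filter
        # on product and state first.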
return res
def onchange_lot_id(self, cr, uid, ids, prodlot_id=False, product_qty=False,
loc_id=False, product_id=False, uom_id=False, context=None):
""" On change of production lot gives a warning message.
@param prodlot_id: Changed production lot id
@param product_qty: Quantity of product
@param loc_id: Location id
@param product_id: Product id
@return: Warning message
"""
if not prodlot_id or not loc_id:
return {}
ctx = context and context.copy() or {}
ctx['location_id'] = loc_id
ctx.update({'raise-exception': True})
uom_obj = self.pool.get('product.uom')
product_obj = self.pool.get('product.product')
product_uom = product_obj.browse(cr, uid, product_id, context=ctx).uom_id
prodlot = self.pool.get('stock.production.lot').browse(cr, uid, prodlot_id, context=ctx)
location = self.pool.get('stock.location').browse(cr, uid, loc_id, context=ctx)
uom = uom_obj.browse(cr, uid, uom_id, context=ctx)
amount_actual = uom_obj._compute_qty_obj(cr, uid, product_uom, prodlot.stock_available, uom, context=ctx)
warning = {}
if (location.usage == 'internal') and (product_qty > (amount_actual or 0.0)):
warning = {
'title': _('Insufficient Stock for Serial Number !'),
'message': _('You are moving %.2f %s but only %.2f %s available for this serial number.') % (product_qty, uom.name, amount_actual, uom.name)
}
return {'warning': warning}
def onchange_quantity(self, cr, uid, ids, product_id, product_qty,
product_uom, product_uos):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_qty: Changed Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_uos_qty': 0.00
}
warning = {}
if (not product_id) or (product_qty <=0.0):
result['product_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# Warn if the quantity was decreased
if ids:
for move in self.read(cr, uid, ids, ['product_qty']):
if product_qty < move['product_qty']:
warning.update({
'title': _('Information'),
'message': _("By changing this quantity here, you accept the "
"new quantity as complete: OpenERP will not "
"automatically generate a back order.") })
break
if product_uos and product_uom and (product_uom != product_uos):
result['product_uos_qty'] = product_qty * uos_coeff['uos_coeff']
else:
result['product_uos_qty'] = product_qty
return {'value': result, 'warning': warning}
def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty,
product_uos, product_uom):
""" On change of product quantity finds UoM and UoS quantities
@param product_id: Product id
@param product_uos_qty: Changed UoS Quantity of product
@param product_uom: Unit of measure of product
@param product_uos: Unit of sale of product
@return: Dictionary of values
"""
result = {
'product_qty': 0.00
}
warning = {}
if (not product_id) or (product_uos_qty <=0.0):
result['product_uos_qty'] = 0.0
return {'value': result}
product_obj = self.pool.get('product.product')
uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
# Warn if the quantity was decreased
for move in self.read(cr, uid, ids, ['product_uos_qty']):
if product_uos_qty < move['product_uos_qty']:
warning.update({
'title': _('Warning: No Back Order'),
'message': _("By changing the quantity here, you accept the "
"new quantity as complete: OpenERP will not "
"automatically generate a Back Order.") })
break
if product_uos and product_uom and (product_uom != product_uos):
result['product_qty'] = product_uos_qty / uos_coeff['uos_coeff']
else:
result['product_qty'] = product_uos_qty
return {'value': result, 'warning': warning}
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False,
loc_dest_id=False, partner_id=False):
""" On change of product id, if finds UoM, UoS, quantity and UoS quantity.
@param prod_id: Changed Product id
@param loc_id: Source location id
@param loc_dest_id: Destination location id
@param partner_id: Address id of partner
@return: Dictionary of values
"""
if not prod_id:
return {}
lang = False
if partner_id:
addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id)
if addr_rec:
lang = addr_rec and addr_rec.lang or False
ctx = {'lang': lang}
product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0]
uos_id = product.uos_id and product.uos_id.id or False
result = {
'product_uom': product.uom_id.id,
'product_uos': uos_id,
'product_qty': 1.00,
'product_uos_qty' : self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'],
'prodlot_id' : False,
}
if not ids:
result['name'] = product.partner_ref
if loc_id:
result['location_id'] = loc_id
if loc_dest_id:
result['location_dest_id'] = loc_dest_id
return {'value': result}
def onchange_move_type(self, cr, uid, ids, type, context=None):
""" On change of move type gives sorce and destination location.
@param type: Move Type
@return: Dictionary of values
"""
mod_obj = self.pool.get('ir.model.data')
location_source_id = 'stock_location_stock'
location_dest_id = 'stock_location_stock'
if type == 'in':
location_source_id = 'stock_location_suppliers'
location_dest_id = 'stock_location_stock'
elif type == 'out':
location_source_id = 'stock_location_stock'
location_dest_id = 'stock_location_customers'
source_location = mod_obj.get_object_reference(cr, uid, 'stock', location_source_id)
dest_location = mod_obj.get_object_reference(cr, uid, 'stock', location_dest_id)
return {'value':{'location_id': source_location and source_location[1] or False, 'location_dest_id': dest_location and dest_location[1] or False}}
def onchange_date(self, cr, uid, ids, date, date_expected, context=None):
""" On change of Scheduled Date gives a Move date.
@param date_expected: Scheduled Date
@param date: Move Date
@return: Move Date
"""
if not date_expected:
date_expected = time.strftime('%Y-%m-%d %H:%M:%S')
return {'value':{'date': date_expected}}
def _chain_compute(self, cr, uid, moves, context=None):
""" Finds whether the location has chained location type or not.
@param moves: Stock moves
@return: Dictionary containing destination location with chained location type.
"""
result = {}
for m in moves:
dest = self.pool.get('stock.location').chained_location_get(
cr,
uid,
m.location_dest_id,
m.picking_id and m.picking_id.partner_id and m.picking_id.partner_id,
m.product_id,
context
)
if dest:
if dest[1] == 'transparent':
newdate = (datetime.strptime(m.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=dest[2] or 0)).strftime('%Y-%m-%d')
self.write(cr, uid, [m.id], {
'date': newdate,
'location_dest_id': dest[0].id})
if m.picking_id and (dest[3] or dest[5]):
self.pool.get('stock.picking').write(cr, uid, [m.picking_id.id], {
'stock_journal_id': m.picking_id.stock_journal_id.id or dest[3],
'type': dest[5] or m.picking_id.type
}, context=context)
m.location_dest_id = dest[0]
res2 = self._chain_compute(cr, uid, [m], context=context)
for pick_id in res2.keys():
result.setdefault(pick_id, [])
result[pick_id] += res2[pick_id]
else:
result.setdefault(m.picking_id, [])
result[m.picking_id].append( (m, dest) )
return result
def _prepare_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
"""Prepare the definition (values) to create a new chained picking.
:param str picking_name: desired new picking name
:param browse_record picking: source picking (being chained to)
:param str picking_type: desired new picking type
:param list moves_todo: specification of the stock moves to be later included in this
picking, in the form::
[[move, (dest_location, auto_packing, chained_delay, chained_journal,
chained_company_id, chained_picking_type)],
...
]
See also :meth:`stock_location.chained_location_get`.
"""
res_company = self.pool.get('res.company')
return {
'name': picking_name,
'origin': tools.ustr(picking.origin or ''),
'type': picking_type,
'note': picking.note,
'move_type': picking.move_type,
'auto_picking': moves_todo[0][1][1] == 'auto',
'stock_journal_id': moves_todo[0][1][3],
'company_id': moves_todo[0][1][4] or res_company._company_default_get(cr, uid, 'stock.company', context=context),
'partner_id': picking.partner_id.id,
'invoice_state': 'none',
'date': picking.date,
}
def _create_chained_picking(self, cr, uid, picking_name, picking, picking_type, moves_todo, context=None):
picking_obj = self.pool.get('stock.picking')
return picking_obj.create(cr, uid, self._prepare_chained_picking(cr, uid, picking_name, picking, picking_type, moves_todo, context=context))
def create_chained_picking(self, cr, uid, moves, context=None):
res_obj = self.pool.get('res.company')
location_obj = self.pool.get('stock.location')
move_obj = self.pool.get('stock.move')
wf_service = netsvc.LocalService("workflow")
new_moves = []
if context is None:
context = {}
seq_obj = self.pool.get('ir.sequence')
for picking, todo in self._chain_compute(cr, uid, moves, context=context).items():
ptype = todo[0][1][5] and todo[0][1][5] or location_obj.picking_type_get(cr, uid, todo[0][0].location_dest_id, todo[0][1][0])
if picking:
# name of new picking according to its type
new_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + ptype)
pickid = self._create_chained_picking(cr, uid, new_pick_name, picking, ptype, todo, context=context)
# Need to check name of old picking because it always considers picking as "OUT" when created from Sales Order
old_ptype = location_obj.picking_type_get(cr, uid, picking.move_lines[0].location_id, picking.move_lines[0].location_dest_id)
if old_ptype != picking.type:
old_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + old_ptype)
self.pool.get('stock.picking').write(cr, uid, [picking.id], {'name': old_pick_name, 'type': old_ptype}, context=context)
else:
pickid = False
for move, (loc, dummy, delay, dummy, company_id, ptype, invoice_state) in todo:
new_id = move_obj.copy(cr, uid, move.id, {
'location_id': move.location_dest_id.id,
'location_dest_id': loc.id,
'date': time.strftime('%Y-%m-%d'),
'picking_id': pickid,
'state': 'waiting',
'company_id': company_id or res_obj._company_default_get(cr, uid, 'stock.company', context=context) ,
'move_history_ids': [],
'date_expected': (datetime.strptime(move.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=delay or 0)).strftime('%Y-%m-%d'),
'move_history_ids2': []}
)
move_obj.write(cr, uid, [move.id], {
'move_dest_id': new_id,
'move_history_ids': [(4, new_id)]
})
new_moves.append(self.browse(cr, uid, [new_id])[0])
if pickid:
wf_service.trg_validate(uid, 'stock.picking', pickid, 'button_confirm', cr)
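        # Chain recursively: the moves created above may themselves end in a
        # chained location, so keep going until no new moves are produced.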
if new_moves:
new_moves += self.create_chained_picking(cr, uid, new_moves, context)
return new_moves
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms stock move.
@return: List of ids.
"""
moves = self.browse(cr, uid, ids, context=context)
self.write(cr, uid, ids, {'state': 'confirmed'})
self.create_chained_picking(cr, uid, moves, context)
return []
def action_assign(self, cr, uid, ids, *args):
""" Changes state to confirmed or waiting.
@return: List of values
"""
todo = []
for move in self.browse(cr, uid, ids):
if move.state in ('confirmed', 'waiting'):
todo.append(move.id)
res = self.check_assign(cr, uid, todo)
return res
def force_assign(self, cr, uid, ids, context=None):
""" Changes the state to assigned.
@return: True
"""
self.write(cr, uid, ids, {'state': 'assigned'})
wf_service = netsvc.LocalService('workflow')
for move in self.browse(cr, uid, ids, context):
if move.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.picking_id.id, cr)
return True
def cancel_assign(self, cr, uid, ids, context=None):
""" Changes the state to confirmed.
@return: True
"""
self.write(cr, uid, ids, {'state': 'confirmed'})
# fix for bug lp:707031
# called write of related picking because changing move availability does
# not trigger workflow of picking in order to change the state of picking
wf_service = netsvc.LocalService('workflow')
for move in self.browse(cr, uid, ids, context):
if move.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.picking_id.id, cr)
return True
#
# Duplicate stock.move
#
def check_assign(self, cr, uid, ids, context=None):
""" Checks the product type and accordingly writes the state.
@return: No. of moves done
"""
done = []
count = 0
pickings = {}
if context is None:
context = {}
for move in self.browse(cr, uid, ids, context=context):
if move.product_id.type == 'consu' or move.location_id.usage == 'supplier':
if move.state in ('confirmed', 'waiting'):
done.append(move.id)
pickings[move.picking_id.id] = 1
continue
if move.state in ('confirmed', 'waiting'):
# Important: we must pass lock=True to _product_reserve() to avoid race conditions and double reservations
res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id}, lock=True)
if res:
                    # _product_available_test depends on the next state for
                    # correct functioning; the test does not work correctly if
                    # the same product occurs multiple times in the same
                    # order, e.g. when using the 'split in two' button on the
                    # stock outgoing form
self.write(cr, uid, [move.id], {'state':'assigned'})
done.append(move.id)
pickings[move.picking_id.id] = 1
r = res.pop(0)
product_uos_qty = self.pool.get('stock.move').onchange_quantity(cr, uid, ids, move.product_id.id, r[0], move.product_id.uom_id.id, move.product_id.uos_id.id)['value']['product_uos_qty']
cr.execute('update stock_move set location_id=%s, product_qty=%s, product_uos_qty=%s where id=%s', (r[1], r[0],product_uos_qty, move.id))
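                    # Raw SQL is used here instead of write(), presumably to
                    # update the reserved quantity and location without firing
                    # the workflow and recomputations a second time.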
while res:
r = res.pop(0)
move_id = self.copy(cr, uid, move.id, {'product_uos_qty': product_uos_qty, 'product_qty': r[0], 'location_id': r[1]})
done.append(move_id)
if done:
count += len(done)
self.write(cr, uid, done, {'state': 'assigned'})
if count:
for pick_id in pickings:
wf_service = netsvc.LocalService("workflow")
wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
return count
def setlast_tracking(self, cr, uid, ids, context=None):
tracking_obj = self.pool.get('stock.tracking')
picking = self.browse(cr, uid, ids, context=context)[0].picking_id
if picking:
last_track = [line.tracking_id.id for line in picking.move_lines if line.tracking_id]
if not last_track:
last_track = tracking_obj.create(cr, uid, {}, context=context)
else:
last_track.sort()
last_track = last_track[-1]
self.write(cr, uid, ids, {'tracking_id': last_track})
return True
#
# Cancel move => cancel others move and pickings
#
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels the moves and if all moves are cancelled it cancels the picking.
@return: True
"""
if not len(ids):
return True
if context is None:
context = {}
pickings = set()
for move in self.browse(cr, uid, ids, context=context):
if move.state in ('confirmed', 'waiting', 'assigned', 'draft'):
if move.picking_id:
pickings.add(move.picking_id.id)
if move.move_dest_id and move.move_dest_id.state == 'waiting':
self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'})
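                    # the destination move no longer has to wait for this one,
                    # so it is released to 'assigned' rather than cancelled
                    # along with it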
if context.get('call_unlink',False) and move.move_dest_id.picking_id:
wf_service = netsvc.LocalService("workflow")
wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False})
if not context.get('call_unlink',False):
for pick in self.pool.get('stock.picking').browse(cr, uid, list(pickings), context=context):
if all(move.state == 'cancel' for move in pick.move_lines):
self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'})
wf_service = netsvc.LocalService("workflow")
for id in ids:
wf_service.trg_trigger(uid, 'stock.move', id, cr)
return True
def _get_accounting_data_for_valuation(self, cr, uid, move, context=None):
"""
Return the accounts and journal to use to post Journal Entries for the real-time
valuation of the move.
:param context: context dictionary that can explicitly mention the company to consider via the 'force_company' key
        :raise: osv.except_osv() if any mandatory account or journal is not defined.
"""
product_obj=self.pool.get('product.product')
accounts = product_obj.get_product_accounts(cr, uid, move.product_id.id, context)
if move.location_id.valuation_out_account_id:
acc_src = move.location_id.valuation_out_account_id.id
else:
acc_src = accounts['stock_account_input']
if move.location_dest_id.valuation_in_account_id:
acc_dest = move.location_dest_id.valuation_in_account_id.id
else:
acc_dest = accounts['stock_account_output']
acc_valuation = accounts.get('property_stock_valuation_account_id', False)
journal_id = accounts['stock_journal']
if acc_dest == acc_valuation:
raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Output Account of this product and Valuation account on category of this product are same.'))
if acc_src == acc_valuation:
raise osv.except_osv(_('Error!'), _('Cannot create Journal Entry, Input Account of this product and Valuation account on category of this product are same.'))
if not acc_src:
raise osv.except_osv(_('Error!'), _('Please define stock input account for this product or its category: "%s" (id: %d)') % \
(move.product_id.name, move.product_id.id,))
if not acc_dest:
raise osv.except_osv(_('Error!'), _('Please define stock output account for this product or its category: "%s" (id: %d)') % \
(move.product_id.name, move.product_id.id,))
if not journal_id:
raise osv.except_osv(_('Error!'), _('Please define journal on the product category: "%s" (id: %d)') % \
(move.product_id.categ_id.name, move.product_id.categ_id.id,))
if not acc_valuation:
raise osv.except_osv(_('Error!'), _('Please define inventory valuation account on the product category: "%s" (id: %d)') % \
(move.product_id.categ_id.name, move.product_id.categ_id.id,))
return journal_id, acc_src, acc_dest, acc_valuation
def _get_reference_accounting_values_for_valuation(self, cr, uid, move, context=None):
"""
Return the reference amount and reference currency representing the inventory valuation for this move.
These reference values should possibly be converted before being posted in Journals to adapt to the primary
and secondary currencies of the relevant accounts.
"""
product_uom_obj = self.pool.get('product.uom')
# by default the reference currency is that of the move's company
reference_currency_id = move.company_id.currency_id.id
default_uom = move.product_id.uom_id.id
qty = product_uom_obj._compute_qty(cr, uid, move.product_uom.id, move.product_qty, default_uom)
# if product is set to average price and a specific value was entered in the picking wizard,
# we use it
if move.product_id.cost_method == 'average' and move.price_unit:
reference_amount = qty * move.price_unit
reference_currency_id = move.price_currency_id.id or reference_currency_id
# Otherwise we default to the company's valuation price type, considering that the values of the
# valuation field are expressed in the default currency of the move's company.
else:
if context is None:
context = {}
currency_ctx = dict(context, currency_id = move.company_id.currency_id.id)
amount_unit = move.product_id.price_get('standard_price', context=currency_ctx)[move.product_id.id]
reference_amount = amount_unit * qty
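        # Worked example (illustrative figures only): a move of 2 Dozen on a
        # product whose default UoM is Unit gives qty == 24; with a standard
        # price of 1.50 the reference_amount is 36.0, expressed in the
        # company currency.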
return reference_amount, reference_currency_id
def _create_product_valuation_moves(self, cr, uid, move, context=None):
"""
        Generate the appropriate accounting moves if the product being moved is subject
to real_time valuation tracking, and the source or destination location is
a transit location or is outside of the company.
"""
if move.product_id.valuation == 'real_time': # FIXME: product valuation should perhaps be a property?
if context is None:
context = {}
src_company_ctx = dict(context,force_company=move.location_id.company_id.id)
dest_company_ctx = dict(context,force_company=move.location_dest_id.company_id.id)
account_moves = []
# Outgoing moves (or cross-company output part)
if move.location_id.company_id \
and (move.location_id.usage == 'internal' and move.location_dest_id.usage != 'internal'\
or move.location_id.company_id != move.location_dest_id.company_id):
journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, src_company_ctx)
reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
#returning goods to supplier
if move.location_dest_id.usage == 'supplier':
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_src, reference_amount, reference_currency_id, context))]
else:
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_valuation, acc_dest, reference_amount, reference_currency_id, context))]
# Incoming moves (or cross-company input part)
if move.location_dest_id.company_id \
and (move.location_id.usage != 'internal' and move.location_dest_id.usage == 'internal'\
or move.location_id.company_id != move.location_dest_id.company_id):
journal_id, acc_src, acc_dest, acc_valuation = self._get_accounting_data_for_valuation(cr, uid, move, dest_company_ctx)
reference_amount, reference_currency_id = self._get_reference_accounting_values_for_valuation(cr, uid, move, src_company_ctx)
#goods return from customer
if move.location_id.usage == 'customer':
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_dest, acc_valuation, reference_amount, reference_currency_id, context))]
else:
account_moves += [(journal_id, self._create_account_move_line(cr, uid, move, acc_src, acc_valuation, reference_amount, reference_currency_id, context))]
move_obj = self.pool.get('account.move')
for j_id, move_lines in account_moves:
move_obj.create(cr, uid,
{
'journal_id': j_id,
'line_id': move_lines,
'ref': move.picking_id and move.picking_id.name})
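        # Posting summary (derived from the branches above): an outgoing move
        # credits the valuation account and debits the output account (or the
        # input account when returning goods to a supplier); an incoming move
        # debits the valuation account and credits the input account (or the
        # output account when goods come back from a customer).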
def action_done(self, cr, uid, ids, context=None):
""" Makes the move done and if all moves are done, it will finish the picking.
@return:
"""
picking_ids = []
move_ids = []
wf_service = netsvc.LocalService("workflow")
if context is None:
context = {}
todo = []
for move in self.browse(cr, uid, ids, context=context):
if move.state=="draft":
todo.append(move.id)
if todo:
self.action_confirm(cr, uid, todo, context=context)
todo = []
for move in self.browse(cr, uid, ids, context=context):
if move.state in ['done','cancel']:
continue
move_ids.append(move.id)
if move.picking_id:
picking_ids.append(move.picking_id.id)
if move.move_dest_id.id and (move.state != 'done'):
# Downstream move should only be triggered if this move is the last pending upstream move
other_upstream_move_ids = self.search(cr, uid, [('id','!=',move.id),('state','not in',['done','cancel']),
('move_dest_id','=',move.move_dest_id.id)], context=context)
if not other_upstream_move_ids:
self.write(cr, uid, [move.id], {'move_history_ids': [(4, move.move_dest_id.id)]})
if move.move_dest_id.state in ('waiting', 'confirmed'):
self.force_assign(cr, uid, [move.move_dest_id.id], context=context)
if move.move_dest_id.picking_id:
wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr)
if move.move_dest_id.auto_validate:
self.action_done(cr, uid, [move.move_dest_id.id], context=context)
self._create_product_valuation_moves(cr, uid, move, context=context)
if move.state not in ('confirmed','done','assigned'):
todo.append(move.id)
if todo:
self.action_confirm(cr, uid, todo, context=context)
self.write(cr, uid, move_ids, {'state': 'done', 'date': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
for id in move_ids:
wf_service.trg_trigger(uid, 'stock.move', id, cr)
for pick_id in picking_ids:
wf_service.trg_write(uid, 'stock.picking', pick_id, cr)
return True
def _create_account_move_line(self, cr, uid, move, src_account_id, dest_account_id, reference_amount, reference_currency_id, context=None):
"""
Generate the account.move.line values to post to track the stock valuation difference due to the
processing of the given stock move.
"""
# prepare default values considering that the destination accounts have the reference_currency_id as their main currency
        partner_id = (move.picking_id.partner_id and move.picking_id.partner_id.id) or False
debit_line_vals = {
'name': move.name,
'product_id': move.product_id and move.product_id.id or False,
'quantity': move.product_qty,
'ref': move.picking_id and move.picking_id.name or False,
'date': time.strftime('%Y-%m-%d'),
'partner_id': partner_id,
'debit': reference_amount,
'account_id': dest_account_id,
}
credit_line_vals = {
'name': move.name,
'product_id': move.product_id and move.product_id.id or False,
'quantity': move.product_qty,
'ref': move.picking_id and move.picking_id.name or False,
'date': time.strftime('%Y-%m-%d'),
'partner_id': partner_id,
'credit': reference_amount,
'account_id': src_account_id,
}
        # if we are posting to accounts in a different currency, provide correct values in both currencies,
        # when compatible with the optional secondary currency on the account.
# Financial Accounts only accept amounts in secondary currencies if there's no secondary currency on the account
# or if it's the same as that of the secondary amount being posted.
account_obj = self.pool.get('account.account')
src_acct, dest_acct = account_obj.browse(cr, uid, [src_account_id, dest_account_id], context=context)
src_main_currency_id = src_acct.company_id.currency_id.id
dest_main_currency_id = dest_acct.company_id.currency_id.id
cur_obj = self.pool.get('res.currency')
if reference_currency_id != src_main_currency_id:
# fix credit line:
credit_line_vals['credit'] = cur_obj.compute(cr, uid, reference_currency_id, src_main_currency_id, reference_amount, context=context)
if (not src_acct.currency_id) or src_acct.currency_id.id == reference_currency_id:
credit_line_vals.update(currency_id=reference_currency_id, amount_currency=reference_amount)
if reference_currency_id != dest_main_currency_id:
# fix debit line:
debit_line_vals['debit'] = cur_obj.compute(cr, uid, reference_currency_id, dest_main_currency_id, reference_amount, context=context)
if (not dest_acct.currency_id) or dest_acct.currency_id.id == reference_currency_id:
debit_line_vals.update(currency_id=reference_currency_id, amount_currency=reference_amount)
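        # Note on the return value below: in the OpenERP ORM, one2many fields
        # accept command tuples, and (0, 0, vals) means "create a new record
        # from vals"; the pair therefore becomes the debit and credit lines of
        # the account.move created by _create_product_valuation_moves().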
return [(0, 0, debit_line_vals), (0, 0, credit_line_vals)]
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
ctx = context.copy()
for move in self.browse(cr, uid, ids, context=context):
if move.state != 'draft' and not ctx.get('call_unlink',False):
raise osv.except_osv(_('User Error!'),
_('You can only delete draft moves.'))
return super(stock_move, self).unlink(
cr, uid, ids, context=ctx)
# _create_lot function is not used anywhere
def _create_lot(self, cr, uid, ids, product_id, prefix=False):
""" Creates production lot
@return: Production lot id
"""
prodlot_obj = self.pool.get('stock.production.lot')
prodlot_id = prodlot_obj.create(cr, uid, {'prefix': prefix, 'product_id': product_id})
return prodlot_id
def action_scrap(self, cr, uid, ids, quantity, location_id, context=None):
""" Move the scrap/damaged product into scrap location
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be scrapped
@param quantity : specify scrap qty
@param location_id : specify scrap location
@param context: context arguments
        @return: Scrapped move lines
"""
        # quantity is expected in the move's UoM
if quantity <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
move_qty = move.product_qty
uos_qty = quantity / move_qty * move.product_uos_qty
default_val = {
'product_qty': quantity,
'product_uos_qty': uos_qty,
'state': move.state,
'scrapped' : True,
'location_dest_id': location_id,
'tracking_id': move.tracking_id.id,
'prodlot_id': move.prodlot_id.id,
}
new_move = self.copy(cr, uid, move.id, default_val)
res += [new_move]
product_obj = self.pool.get('product.product')
for product in product_obj.browse(cr, uid, [move.product_id.id], context=context):
if move.picking_id:
uom = product.uom_id.name if product.uom_id else ''
message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name)
move.picking_id.message_post(body=message)
self.action_done(cr, uid, res, context=context)
return res
# action_split function is not used anywhere
# FIXME: deprecate this method
def action_split(self, cr, uid, ids, quantity, split_by_qty=1, prefix=False, with_lot=True, context=None):
""" Split Stock Move lines into production lot which specified split by quantity.
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be splited
@param split_by_qty : specify split by qty
@param prefix : specify prefix of production lot
@param with_lot : if true, prodcution lot will assign for split line otherwise not.
@param context: context arguments
@return: Splited move lines
"""
if context is None:
context = {}
if quantity <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
if split_by_qty <= 0 or quantity == 0:
return res
uos_qty = split_by_qty / move.product_qty * move.product_uos_qty
quantity_rest = quantity % split_by_qty
            # the remainder's UoS quantity must be proportional to quantity_rest
            uos_qty_rest = quantity_rest / move.product_qty * move.product_uos_qty
update_val = {
'product_qty': split_by_qty,
'product_uos_qty': uos_qty,
}
for idx in range(int(quantity//split_by_qty)):
if not idx and move.product_qty<=quantity:
current_move = move.id
else:
current_move = self.copy(cr, uid, move.id, {'state': move.state})
res.append(current_move)
if with_lot:
update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
self.write(cr, uid, [current_move], update_val)
if quantity_rest > 0:
idx = int(quantity//split_by_qty)
update_val['product_qty'] = quantity_rest
update_val['product_uos_qty'] = uos_qty_rest
if not idx and move.product_qty<=quantity:
current_move = move.id
else:
current_move = self.copy(cr, uid, move.id, {'state': move.state})
res.append(current_move)
if with_lot:
update_val['prodlot_id'] = self._create_lot(cr, uid, [current_move], move.product_id.id)
self.write(cr, uid, [current_move], update_val)
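        # Worked example: splitting a move of 10 with split_by_qty=3 yields
        # three moves of 3 (the first one reusing the original record) plus a
        # remainder move of 1.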
return res
def action_consume(self, cr, uid, ids, quantity, location_id=False, context=None):
""" Consumed product with specific quatity from specific source location
@param cr: the database cursor
@param uid: the user id
@param ids: ids of stock move object to be consumed
@param quantity : specify consume quantity
@param location_id : specify source location
@param context: context arguments
@return: Consumed lines
"""
        # quantity is expected in the move's UoM
if context is None:
context = {}
if quantity <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
res = []
for move in self.browse(cr, uid, ids, context=context):
move_qty = move.product_qty
if move_qty <= 0:
raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
quantity_rest = move.product_qty
quantity_rest -= quantity
uos_qty_rest = quantity_rest / move_qty * move.product_uos_qty
if quantity_rest <= 0:
quantity_rest = 0
uos_qty_rest = 0
quantity = move.product_qty
uos_qty = quantity / move_qty * move.product_uos_qty
if quantity_rest > 0:
default_val = {
'product_qty': quantity,
'product_uos_qty': uos_qty,
'state': move.state,
'location_id': location_id or move.location_id.id,
}
current_move = self.copy(cr, uid, move.id, default_val)
res += [current_move]
update_val = {}
update_val['product_qty'] = quantity_rest
update_val['product_uos_qty'] = uos_qty_rest
self.write(cr, uid, [move.id], update_val)
else:
quantity_rest = quantity
uos_qty_rest = uos_qty
res += [move.id]
update_val = {
'product_qty' : quantity_rest,
'product_uos_qty' : uos_qty_rest,
'location_id': location_id or move.location_id.id,
}
self.write(cr, uid, [move.id], update_val)
self.action_done(cr, uid, res, context=context)
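        # Worked example: consuming 3 out of a move of 10 copies a new move of
        # 3 (which is processed by action_done above) and leaves the original
        # at 7; consuming 10 or more processes the original move in full.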
return res
# FIXME: needs refactoring, this code is partially duplicated in stock_picking.do_partial()!
def do_partial(self, cr, uid, ids, partial_datas, context=None):
""" Makes partial pickings and moves done.
@param partial_datas: Dictionary containing details of partial picking
like partner_id, delivery_date, delivery
moves with product_id, product_qty, uom
"""
res = {}
picking_obj = self.pool.get('stock.picking')
product_obj = self.pool.get('product.product')
currency_obj = self.pool.get('res.currency')
uom_obj = self.pool.get('product.uom')
wf_service = netsvc.LocalService("workflow")
if context is None:
context = {}
complete, too_many, too_few = [], [], []
move_product_qty = {}
prodlot_ids = {}
for move in self.browse(cr, uid, ids, context=context):
if move.state in ('done', 'cancel'):
continue
partial_data = partial_datas.get('move%s'%(move.id), False)
assert partial_data, _('Missing partial picking data for move #%s.') % (move.id)
product_qty = partial_data.get('product_qty',0.0)
move_product_qty[move.id] = product_qty
product_uom = partial_data.get('product_uom',False)
product_price = partial_data.get('product_price',0.0)
product_currency = partial_data.get('product_currency',False)
prodlot_ids[move.id] = partial_data.get('prodlot_id')
if move.product_qty == product_qty:
complete.append(move)
elif move.product_qty > product_qty:
too_few.append(move)
else:
too_many.append(move)
# Average price computation
if (move.picking_id.type == 'in') and (move.product_id.cost_method == 'average'):
product = product_obj.browse(cr, uid, move.product_id.id)
move_currency_id = move.company_id.currency_id.id
context['currency_id'] = move_currency_id
qty = uom_obj._compute_qty(cr, uid, product_uom, product_qty, product.uom_id.id)
if qty > 0:
new_price = currency_obj.compute(cr, uid, product_currency,
move_currency_id, product_price)
new_price = uom_obj._compute_price(cr, uid, product_uom, new_price,
product.uom_id.id)
if product.qty_available <= 0:
new_std_price = new_price
else:
# Get the standard price
amount_unit = product.price_get('standard_price', context=context)[product.id]
new_std_price = ((amount_unit * product.qty_available)\
+ (new_price * qty))/(product.qty_available + qty)
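                        # Worked example: 10 units on hand at 2.00 plus 5
                        # received at 3.00 gives (2.00*10 + 3.00*5) / 15
                        # = 2.33 as the new standard price.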
product_obj.write(cr, uid, [product.id],{'standard_price': new_std_price})
# Record the values that were chosen in the wizard, so they can be
# used for inventory valuation if real-time valuation is enabled.
self.write(cr, uid, [move.id],
{'price_unit': product_price,
'price_currency_id': product_currency,
})
for move in too_few:
product_qty = move_product_qty[move.id]
if product_qty != 0:
defaults = {
'product_qty' : product_qty,
'product_uos_qty': product_qty,
'picking_id' : move.picking_id.id,
'state': 'assigned',
'move_dest_id': False,
'price_unit': move.price_unit,
}
prodlot_id = prodlot_ids[move.id]
if prodlot_id:
defaults.update(prodlot_id=prodlot_id)
new_move = self.copy(cr, uid, move.id, defaults)
complete.append(self.browse(cr, uid, new_move))
self.write(cr, uid, [move.id],
{
'product_qty': move.product_qty - product_qty,
'product_uos_qty': move.product_qty - product_qty,
'prodlot_id': False,
'tracking_id': False,
})
for move in too_many:
self.write(cr, uid, [move.id],
{
'product_qty': move.product_qty,
'product_uos_qty': move.product_qty,
})
complete.append(move)
for move in complete:
if prodlot_ids.get(move.id):
self.write(cr, uid, [move.id],{'prodlot_id': prodlot_ids.get(move.id)})
self.action_done(cr, uid, [move.id], context=context)
if move.picking_id.id :
# TOCHECK : Done picking if all moves are done
cr.execute("""
SELECT move.id FROM stock_picking pick
RIGHT JOIN stock_move move ON move.picking_id = pick.id AND move.state = %s
WHERE pick.id = %s""",
('done', move.picking_id.id))
res = cr.fetchall()
if len(res) == len(move.picking_id.move_lines):
picking_obj.action_move(cr, uid, [move.picking_id.id])
wf_service.trg_validate(uid, 'stock.picking', move.picking_id.id, 'button_done', cr)
return [move.id for move in complete]
stock_move()
class stock_inventory(osv.osv):
_name = "stock.inventory"
_description = "Inventory"
_columns = {
'name': fields.char('Inventory Reference', size=64, required=True, readonly=True, states={'draft': [('readonly', False)]}),
'date': fields.datetime('Creation Date', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'date_done': fields.datetime('Date done'),
'inventory_line_id': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=True, states={'draft': [('readonly', False)]}),
'move_ids': fields.many2many('stock.move', 'stock_inventory_move_rel', 'inventory_id', 'move_id', 'Created Moves'),
'state': fields.selection( (('draft', 'Draft'), ('cancel','Cancelled'), ('confirm','Confirmed'), ('done', 'Done')), 'Status', readonly=True, select=True),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft':[('readonly',False)]}),
}
_defaults = {
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'state': 'draft',
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c)
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default = default.copy()
default.update({'move_ids': [], 'date_done': False})
return super(stock_inventory, self).copy(cr, uid, id, default, context=context)
def _inventory_line_hook(self, cr, uid, inventory_line, move_vals):
""" Creates a stock move from an inventory line
@param inventory_line:
@param move_vals:
@return:
"""
return self.pool.get('stock.move').create(cr, uid, move_vals)
def action_done(self, cr, uid, ids, context=None):
""" Finish the inventory
@return: True
"""
if context is None:
context = {}
move_obj = self.pool.get('stock.move')
for inv in self.browse(cr, uid, ids, context=context):
move_obj.action_done(cr, uid, [x.id for x in inv.move_ids], context=context)
self.write(cr, uid, [inv.id], {'state':'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
return True
def action_confirm(self, cr, uid, ids, context=None):
""" Confirm the inventory and writes its finished date
@return: True
"""
if context is None:
context = {}
        # to perform the correct inventory corrections we need to analyze stock locations one
        # by one, never recursively, so we use a special context
product_context = dict(context, compute_child=False)
location_obj = self.pool.get('stock.location')
for inv in self.browse(cr, uid, ids, context=context):
move_ids = []
for line in inv.inventory_line_id:
pid = line.product_id.id
product_context.update(uom=line.product_uom.id, to_date=inv.date, date=inv.date, prodlot_id=line.prod_lot_id.id)
amount = location_obj._product_get(cr, uid, line.location_id.id, [pid], product_context)[pid]
change = line.product_qty - amount
lot_id = line.prod_lot_id.id
if change:
location_id = line.product_id.property_stock_inventory.id
value = {
'name': _('INV:') + (line.inventory_id.name or ''),
'product_id': line.product_id.id,
'product_uom': line.product_uom.id,
'prodlot_id': lot_id,
'date': inv.date,
}
if change > 0:
value.update( {
'product_qty': change,
'location_id': location_id,
'location_dest_id': line.location_id.id,
})
else:
value.update( {
'product_qty': -change,
'location_id': line.location_id.id,
'location_dest_id': location_id,
})
move_ids.append(self._inventory_line_hook(cr, uid, line, value))
self.write(cr, uid, [inv.id], {'state': 'confirm', 'move_ids': [(6, 0, move_ids)]})
self.pool.get('stock.move').action_confirm(cr, uid, move_ids, context=context)
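        # Worked example: if the theoretical quantity at the location is 8 and
        # 10 were counted, change == +2 and a move of 2 is created from the
        # inventory-loss location into the counted location; a count of 5
        # creates a move of 3 in the opposite direction.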
return True
def action_cancel_draft(self, cr, uid, ids, context=None):
""" Cancels the stock move and change inventory state to draft.
@return: True
"""
for inv in self.browse(cr, uid, ids, context=context):
self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
self.write(cr, uid, [inv.id], {'state':'draft'}, context=context)
return True
def action_cancel_inventory(self, cr, uid, ids, context=None):
""" Cancels both stock move and inventory
@return: True
"""
move_obj = self.pool.get('stock.move')
account_move_obj = self.pool.get('account.move')
for inv in self.browse(cr, uid, ids, context=context):
move_obj.action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
for move in inv.move_ids:
account_move_ids = account_move_obj.search(cr, uid, [('name', '=', move.name)])
if account_move_ids:
account_move_data_l = account_move_obj.read(cr, uid, account_move_ids, ['state'], context=context)
for account_move in account_move_data_l:
if account_move['state'] == 'posted':
raise osv.except_osv(_('User Error!'),
_('In order to cancel this inventory, you must first unpost related journal entries.'))
account_move_obj.unlink(cr, uid, [account_move['id']], context=context)
self.write(cr, uid, [inv.id], {'state': 'cancel'}, context=context)
return True
stock_inventory()
class stock_inventory_line(osv.osv):
_name = "stock.inventory.line"
_description = "Inventory Line"
_rec_name = "inventory_id"
_columns = {
'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True),
'location_id': fields.many2one('stock.location', 'Location', required=True),
'product_id': fields.many2one('product.product', 'Product', required=True, select=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
'company_id': fields.related('inventory_id','company_id',type='many2one',relation='res.company',string='Company',store=True, select=True, readonly=True),
'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
'state': fields.related('inventory_id','state',type='char',string='Status',readonly=True),
}
def _default_stock_location(self, cr, uid, context=None):
stock_location = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'stock_location_stock')
return stock_location.id
_defaults = {
'location_id': _default_stock_location
}
def on_change_product_id(self, cr, uid, ids, location_id, product, uom=False, to_date=False):
""" Changes UoM and name if product_id changes.
@param location_id: Location id
@param product: Changed product_id
@param uom: UoM product
@return: Dictionary of changed values
"""
if not product:
return {'value': {'product_qty': 0.0, 'product_uom': False, 'prod_lot_id': False}}
obj_product = self.pool.get('product.product').browse(cr, uid, product)
uom = uom or obj_product.uom_id.id
amount = self.pool.get('stock.location')._product_get(cr, uid, location_id, [product], {'uom': uom, 'to_date': to_date, 'compute_child': False})[product]
result = {'product_qty': amount, 'product_uom': uom, 'prod_lot_id': False}
return {'value': result}
stock_inventory_line()
#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
_name = "stock.warehouse"
_description = "Warehouse"
_columns = {
'name': fields.char('Name', size=128, required=True, select=True),
'company_id': fields.many2one('res.company', 'Company', required=True, select=True),
'partner_id': fields.many2one('res.partner', 'Owner Address'),
'lot_input_id': fields.many2one('stock.location', 'Location Input', required=True, domain=[('usage','<>','view')]),
'lot_stock_id': fields.many2one('stock.location', 'Location Stock', required=True, domain=[('usage','=','internal')]),
'lot_output_id': fields.many2one('stock.location', 'Location Output', required=True, domain=[('usage','<>','view')]),
}
def _default_lot_input_stock_id(self, cr, uid, context=None):
lot_input_stock = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'stock_location_stock')
return lot_input_stock.id
def _default_lot_output_id(self, cr, uid, context=None):
lot_output = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'stock_location_output')
return lot_output.id
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
'lot_input_id': _default_lot_input_stock_id,
'lot_stock_id': _default_lot_input_stock_id,
'lot_output_id': _default_lot_output_id,
}
stock_warehouse()
#----------------------------------------------------------
# "Empty" Classes that are used to vary from the original stock.picking (that are dedicated to the internal pickings)
# in order to offer a different usability with different views, labels, available reports/wizards...
#----------------------------------------------------------
class stock_picking_in(osv.osv):
_name = "stock.picking.in"
_inherit = "stock.picking"
_table = "stock_picking"
_description = "Incoming Shipments"
def check_access_rights(self, cr, uid, operation, raise_exception=True):
        #override in order to redirect the check of access rights to the stock.picking object
return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
        #override in order to redirect the check of access rules to the stock.picking object
return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        #override in order to trigger the workflow of stock.picking at the end of create, write and unlink operations
        #instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
        #override in order to fire the workflow signal on the given stock.picking workflow instance
        #instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)
_columns = {
'backorder_id': fields.many2one('stock.picking.in', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'state': fields.selection(
[('draft', 'Draft'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Receive'),
('done', 'Received'),
('cancel', 'Cancelled'),],
'Status', readonly=True, select=True,
help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Receive: products reserved, simply waiting for confirmation.\n
* Received: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""),
}
_defaults = {
'type': 'in',
}
class stock_picking_out(osv.osv):
_name = "stock.picking.out"
_inherit = "stock.picking"
_table = "stock_picking"
_description = "Delivery Orders"
def check_access_rights(self, cr, uid, operation, raise_exception=True):
        #override in order to redirect the check of access rights to the stock.picking object
return self.pool.get('stock.picking').check_access_rights(cr, uid, operation, raise_exception=raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
        #override in order to redirect the check of access rules to the stock.picking object
return self.pool.get('stock.picking').check_access_rule(cr, uid, ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        #override in order to trigger the workflow of stock.picking at the end of create, write and unlink operations
        #instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_trigger(cr, uid, ids, trigger, context=context)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
        #override in order to fire the workflow signal on the given stock.picking workflow instance
        #instead of its own workflow (which does not exist)
return self.pool.get('stock.picking')._workflow_signal(cr, uid, ids, signal, context=context)
_columns = {
'backorder_id': fields.many2one('stock.picking.out', 'Back Order of', states={'done':[('readonly', True)], 'cancel':[('readonly',True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True),
'state': fields.selection(
[('draft', 'Draft'),
('auto', 'Waiting Another Operation'),
('confirmed', 'Waiting Availability'),
('assigned', 'Ready to Deliver'),
('done', 'Delivered'),
('cancel', 'Cancelled'),],
'Status', readonly=True, select=True,
help="""* Draft: not confirmed yet and will not be scheduled until confirmed\n
* Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. in Make-To-Order flows)\n
* Waiting Availability: still waiting for the availability of products\n
* Ready to Deliver: products reserved, simply waiting for confirmation.\n
* Delivered: has been processed, can't be modified or cancelled anymore\n
* Cancelled: has been cancelled, can't be confirmed anymore"""),
}
_defaults = {
'type': 'out',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
535521469/crawler_sth | refs/heads/master | scrapy/nbsc/settings.py | 1 | # Scrapy settings for tutorial project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/topics/settings.html
#
BOT_NAME = 'nbsc'
SPIDER_MODULES = ['scrapy.nbsc.spiders.tjbz_spider']
#NEWSPIDER_MODULE = 'scrapy.statsgov.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'tutorial (+http://www.yourdomain.com)'
|
eneldoserrata/marcos_openerp | refs/heads/master | marcos_addons/pos_cashier/__init__.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
# Module : POS Cashiers
# Manage cashiers for Point Of Sale
# Author : Thierry Godin <[email protected]>
# Copyright (C) 2013 Thierry Godin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
##############################################################################
import pos_cashier |
dronefly/dronefly.github.io | refs/heads/master | flask/lib/python2.7/site-packages/guess_language/blocks.py | 65 | ''' Categorize unicode characters by the code block in which they are found.
Copyright (c) 2008, Kent S Johnson
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''
import os, re
from bisect import bisect_left
def _loadBlocks():
''' Load Blocks.txt.
Create and return two parallel lists. One has the start and end points for
codepoint ranges, the second has the corresponding block name.
'''
# Expects our version of Blocks.txt to be in the same dir as this file
blocksPath = os.path.join(os.path.dirname(__file__), 'Blocks.txt')
endpoints = []
names = []
splitter = re.compile(r'^(....)\.\.(....); (.*)$')
for line in open(blocksPath):
if line.startswith('#'):
continue
line = line.strip()
if not line:
continue
m = splitter.match(line)
assert m
start = int(m.group(1), 16)
end = int(m.group(2), 16)
name = m.group(3)
endpoints.append(start)
endpoints.append(end)
names.append(name)
names.append(name)
return endpoints, names
_endpoints, _names = _loadBlocks()
def unicodeBlock(c):
''' Returns the name of the unicode block containing c
c must be a single character. '''
ix = bisect_left(_endpoints, ord(c))
return _names[ix]
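
# Minimal usage sketch (added for illustration; the sample characters are
# arbitrary). Note that _endpoints holds both the start and the end of every
# range and _names holds each block name twice, so for any character inside a
# defined block bisect_left lands on the right entry.
if __name__ == '__main__':
    for c in u'aA1\u00e9\u0431\u4e2d':
        print '%r -> %s' % (c, unicodeBlock(c))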
|
astropy/conda-build-tools | refs/heads/master | extruder/copy_packages.py | 1 | from __future__ import print_function
from argparse import ArgumentParser
import os
from ruamel import yaml
from binstar_client.utils import get_server_api
from binstar_client.errors import NotFound
from conda.version import VersionOrder
__all__ = ['PackageCopier']
class PackageCopier(object):
def __init__(self, source, destination, input_packages, token=''):
"""
Parameters
----------
source : ``str``
Name of source conda channel.
destination : ``str``
Name of destination conda channel.
        input_packages : ``dict``
Dictionary in which keys are package names and values are either
a string version number (e.g. ``'1.0.1'``) or ``None``, which
indicates the latest version on the source channel should be
copied. This dictionary should contain the packages that
potentially need to be copied.
token : ``str``, optional
Token for conda API. Needed for the actual copy operation.
"""
self.source = source
self.destination = destination
self.input_packages = input_packages
self.api = get_server_api(token)
self.to_copy = self._package_versions_to_copy()
def _package_versions_to_copy(self):
"""
Determine which version of each package in packages
should be copied from conda channel source to channel
destination.
Returns
-------
``dict``
Dictionary whose keys are the packages that actually need to be
copied and whose values are the version to be copied.
"""
packages = self.input_packages
copy_versions = {}
for p, version in packages.items():
copy_builds = []
need_to_copy = False
# This will end up True if the version exists on both src and dest
# and triggers a comparison of file names. Technically, it could
# be omitted, but seems more likely to be clear to future me.
check_builds = False
cf = self.api.package(self.source, p)
cf_version = VersionOrder(cf['latest_version'])
if version is not None:
pinned_version = VersionOrder(version)
else:
pinned_version = None
if pinned_version is not None:
if str(pinned_version) not in cf['versions']:
error_message = ('Version {} of package {} not '
'found on source channel {}.')
err = error_message.format(pinned_version, p,
self.source)
raise RuntimeError(err)
try:
ap = self.api.package(self.destination, p)
except NotFound:
need_to_copy = True
ap_version = None
else:
ap_version = VersionOrder(ap['latest_version'])
if pinned_version is None:
if cf_version > ap_version:
need_to_copy = True
elif cf_version == ap_version:
check_builds = True
else:
if str(pinned_version) not in ap['versions']:
need_to_copy = True
else:
check_builds = True
if check_builds:
# If we get here it means that the same version is on both
# source and destination so we need to check the individual
# builds.
check_version = pinned_version or cf_version
copy_builds = \
self._check_for_missing_builds(cf,
ap,
check_version)
need_to_copy = len(copy_builds) > 0
if need_to_copy:
copy_versions[p] = (str(cf_version), copy_builds)
return copy_versions
def _check_for_missing_builds(self, source, dest, version):
"""
For two packages that have the same version, see if there are any
files on the source that are not on the destination.
source and dest are both conda channels, and version
should be a string.
"""
def files_for_version(channel, version):
files = [f['basename'] for f in channel['files']
if VersionOrder(version) == VersionOrder(f['version'])]
return files
source_files = files_for_version(source, version)
destination_files = files_for_version(dest, version)
need_to_copy = [src for src in source_files
if src not in destination_files]
return need_to_copy
def copy_packages(self):
"""
Actually do the copying of the packages.
"""
for p, v in self.to_copy.items():
version, buildnames = v
if not buildnames:
# Copy all of the builds for this version
self.api.copy(self.source, p, version, to_owner=self.destination)
else:
for build in buildnames:
self.api.copy(self.source, p, version,
basename=build,
to_owner=self.destination)
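
# Hypothetical usage sketch (channel names, versions and the token are
# placeholders, not real credentials):
#
#   pc = PackageCopier('conda-forge', 'astropy',
#                      {'numpy': None, 'astropy': '1.0.1'},
#                      token='<anaconda.org API token>')
#   pc.copy_packages()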
def main(arguments=None):
parser = ArgumentParser('Simple script for copying packages '
'from one conda owner to another')
parser.add_argument('packages_yaml',
help=('Packages to copy, as a yaml dictionary. '
'Keys are package names, values are version, '
'or None for the latest version from '
'the source.'))
parser.add_argument('--source', default='conda-forge',
help='Source conda channel owner.')
parser.add_argument('--token', default='',
help=('anaconda.org API token. May set '
'environmental variable BINSTAR_TOKEN '
'instead.'))
parser.add_argument('destination_channel',
help=('Destination conda channel owner.'))
if arguments is None:
args = parser.parse_args()
else:
args = parser.parse_args(arguments)
source = args.source
dest = args.destination_channel
package_file = args.packages_yaml
token = args.token
with open(package_file) as f:
packages = yaml.load(f)
# No token on command line, try the environment...
if not token:
token = os.getenv('BINSTAR_TOKEN')
# Still no token, so raise an error
if not token:
raise RuntimeError('Set an anaconda.org API token before running')
pc = PackageCopier(source, dest, packages, token=token)
pc.copy_packages()
if __name__ == '__main__':
main()
|
a-tsvetkov/lucid-python-django-registration | refs/heads/master | registration/management/commands/cleanupregistration.py | 232 | """
A management command which deletes expired accounts (e.g.,
accounts which signed up but never activated) from the database.
Calls ``RegistrationProfile.objects.delete_expired_users()``, which
contains the actual logic for determining which accounts are deleted.
"""
from django.core.management.base import NoArgsCommand
from registration.models import RegistrationProfile
class Command(NoArgsCommand):
help = "Delete expired user registrations from the database"
def handle_noargs(self, **options):
RegistrationProfile.objects.delete_expired_users()
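
# Typical invocation (illustrative; often scheduled via cron):
#   python manage.py cleanupregistration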
|
ohgodscience/LearningPython | refs/heads/master | piglow/clock.py | 3 | ######################################
## A binary clock using the PiGlow ##
## ##
## Example by Jason - @Boeeerb ##
######################################
from piglow import PiGlow
from time import sleep
from datetime import datetime
piglow = PiGlow()
### You can customise these settings ###
show12hr = 1 # Show 12 or 24hr clock - 0= 24hr, 1= 12hr
ledbrightness = 10 # Set brightness of LED - 1-255 (recommend 10-20, put 0 and you won't see it!)
hourflash = 1 # Choose how to flash change of hour - 1= white leds, 2= all flash
armtop = "s" # h= hour, m= minutes, s= seconds
armright = "m"
armbottom = "h"
### End of customising ###
piglow.all(0)
hourcount = 0
hourcurrent = 0
while True:
time = datetime.now().time()
hour,min,sec = str(time).split(":")
# Bug fix by Phil Moyer - Tested and verified by Ric Woods - Thanks guys!
try:
rv = str(sec).index(".")
sec,micro = str(sec).split(".")
except ValueError:
sec = str(sec)
micro = "0"
hour = int(hour)
if show12hr == 1:
if hour > 12:
hour = hour - 12
min = int(min)
sec = int(sec)
binhour = "%06d" % int(bin(hour)[2:])
binmin = "%06d" % int(bin(min)[2:])
binsec = "%06d" % int(bin(sec)[2:])
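    # Worked example: for sec == 45, bin(45) == '0b101101', so
    # int('101101') == 101101 and "%06d" yields '101101' -- six
    # zero-padded binary digits, one per LED on an arm.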
# Check if current hour is different and set ready to flash hour
if hourcurrent != hour:
hourcount = hour
hourcurrent = hour
if armbottom == "h":
arm3 = list(binhour)
elif armbottom == "m":
arm3 = list(binmin)
else:
arm3 = list(binsec)
    # Light LEDs 13-18 from the bits of arm3 (LED 13 shows the least
    # significant bit, LED 18 the most significant)
    for i in range(6):
        piglow.led(13 + i, ledbrightness if arm3[5 - i] == "1" else 0)
if armright == "h":
arm2 = list(binhour)
elif armright == "m":
arm2 = list(binmin)
else:
arm2 = list(binsec)
    # Light LEDs 7-12 from the bits of arm2 (LED 7 shows the least
    # significant bit, LED 12 the most significant)
    for i in range(6):
        piglow.led(7 + i, ledbrightness if arm2[5 - i] == "1" else 0)
if armtop == "h":
arm1 = list(binhour)
elif armtop == "m":
arm1 = list(binmin)
else:
arm1 = list(binsec)
    # Light LEDs 1-6 from the bits of arm1 (LED 1 shows the least
    # significant bit, LED 6 the most significant)
    for i in range(6):
        piglow.led(1 + i, ledbrightness if arm1[5 - i] == "1" else 0)
# Flash the white leds for the hour
if hourcount != 0:
sleep(0.5)
if hourflash == 1:
piglow.white(ledbrightness)
if hourflash == 2:
piglow.all(ledbrightness)
sleep(0.5)
hourcount = hourcount - 1
else:
sleep(0.1)
|
hcs/mailman | refs/heads/master | src/mailman/bin/find_member.py | 3 | # Copyright (C) 1998-2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
import re
import sys
import optparse
from mailman import errors
from mailman import MailList
from mailman.configuration import config
from mailman.core.i18n import _
from mailman.version import MAILMAN_VERSION
AS_MEMBER = 0x01
AS_OWNER = 0x02
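
# Bitmask example: an address that is both a member and an owner of a list is
# recorded as AS_MEMBER | AS_OWNER == 0x03.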
def parseargs():
parser = optparse.OptionParser(version=MAILMAN_VERSION,
usage=_("""\
%prog [options] regex [regex ...]
Find all lists that a member's address is on.
The interaction between -l and -x (see below) is as follows. If any -l option
is given then only the named list will be included in the search. If any -x
option is given but no -l option is given, then all lists will be search
except those specifically excluded.
Regular expression syntax uses the Python 're' module. Complete
specifications are at:
http://www.python.org/doc/current/lib/module-re.html
Address matches are case-insensitive, but case-preserved addresses are
displayed."""))
parser.add_option('-l', '--listname',
type='string', default=[], action='append',
dest='listnames',
help=_('Include only the named list in the search'))
parser.add_option('-x', '--exclude',
type='string', default=[], action='append',
dest='excludes',
help=_('Exclude the named list from the search'))
parser.add_option('-w', '--owners',
default=False, action='store_true',
help=_('Search list owners as well as members'))
parser.add_option('-C', '--config',
help=_('Alternative configuration file to use'))
opts, args = parser.parse_args()
if not args:
parser.print_help()
print >> sys.stderr, _('Search regular expression required')
sys.exit(1)
return parser, opts, args
def main():
parser, opts, args = parseargs()
config.load(opts.config)
listnames = opts.listnames or config.list_manager.names
includes = set(listname.lower() for listname in listnames)
excludes = set(listname.lower() for listname in opts.excludes)
listnames = includes - excludes
if not listnames:
print _('No lists to search')
return
cres = []
for r in args:
cres.append(re.compile(r, re.IGNORECASE))
    # dictionary of {address: {listname: bitmask of AS_MEMBER/AS_OWNER}}
matches = {}
for listname in listnames:
try:
mlist = MailList.MailList(listname, lock=False)
except errors.MMListError:
print _('No such list: $listname')
continue
if opts.owners:
owners = mlist.owner
else:
owners = []
for cre in cres:
for member in mlist.getMembers():
if cre.search(member):
addr = mlist.getMemberCPAddress(member)
entries = matches.get(addr, {})
aswhat = entries.get(listname, 0)
aswhat |= AS_MEMBER
entries[listname] = aswhat
matches[addr] = entries
for owner in owners:
if cre.search(owner):
entries = matches.get(owner, {})
aswhat = entries.get(listname, 0)
aswhat |= AS_OWNER
entries[listname] = aswhat
matches[owner] = entries
addrs = matches.keys()
addrs.sort()
for k in addrs:
hits = matches[k]
lists = hits.keys()
print k, _('found in:')
for name in lists:
aswhat = hits[name]
if aswhat & AS_MEMBER:
print ' ', name
if aswhat & AS_OWNER:
print ' ', name, _('(as owner)')
if __name__ == '__main__':
main()
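
# Example invocation (illustrative pattern and list name):
#   python find_member.py -w -x announce 'alice@example\.com'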
|
JinnLynn/alfred-workflows | refs/heads/master | lib/bs4/diagnose.py | 431 | """Diagnostic functions, mainly for use when doing tech support."""
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry
import os
import pstats
import random
import tempfile
import time
import traceback
import sys
import cProfile
def diagnose(data):
"""Diagnostic suite for isolating common problems."""
print "Diagnostic running on Beautiful Soup %s" % __version__
print "Python version %s" % sys.version
basic_parsers = ["html.parser", "html5lib", "lxml"]
for name in basic_parsers:
for builder in builder_registry.builders:
if name in builder.features:
break
else:
basic_parsers.remove(name)
print (
"I noticed that %s is not installed. Installing it may help." %
name)
if 'lxml' in basic_parsers:
basic_parsers.append(["lxml", "xml"])
from lxml import etree
print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))
if 'html5lib' in basic_parsers:
import html5lib
print "Found html5lib version %s" % html5lib.__version__
if hasattr(data, 'read'):
data = data.read()
elif os.path.exists(data):
print '"%s" looks like a filename. Reading data from the file.' % data
data = open(data).read()
elif data.startswith("http:") or data.startswith("https:"):
print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
return
print
for parser in basic_parsers:
print "Trying to parse your markup with %s" % parser
success = False
try:
soup = BeautifulSoup(data, parser)
success = True
except Exception, e:
print "%s could not parse the markup." % parser
traceback.print_exc()
if success:
print "Here's what %s did with the markup:" % parser
print soup.prettify()
print "-" * 80
def lxml_trace(data, html=True, **kwargs):
"""Print out the lxml events that occur during parsing.
This lets you see how lxml parses a document when no Beautiful
Soup code is running.
"""
from lxml import etree
for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
print("%s, %4s, %s" % (event, element.tag, element.text))
class AnnouncingParser(HTMLParser):
"""Announces HTMLParser parse events, without doing anything else."""
def _p(self, s):
print(s)
def handle_starttag(self, name, attrs):
self._p("%s START" % name)
def handle_endtag(self, name):
self._p("%s END" % name)
def handle_data(self, data):
self._p("%s DATA" % data)
def handle_charref(self, name):
self._p("%s CHARREF" % name)
def handle_entityref(self, name):
self._p("%s ENTITYREF" % name)
def handle_comment(self, data):
self._p("%s COMMENT" % data)
def handle_decl(self, data):
self._p("%s DECL" % data)
def unknown_decl(self, data):
self._p("%s UNKNOWN-DECL" % data)
def handle_pi(self, data):
self._p("%s PI" % data)
def htmlparser_trace(data):
"""Print out the HTMLParser events that occur during parsing.
This lets you see how HTMLParser parses a document when no
Beautiful Soup code is running.
"""
parser = AnnouncingParser()
parser.feed(data)
_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"
def rword(length=5):
"Generate a random word-like string."
s = ''
for i in range(length):
if i % 2 == 0:
t = _consonants
else:
t = _vowels
s += random.choice(t)
return s
def rsentence(length=4):
"Generate a random sentence-like string."
return " ".join(rword(random.randint(4,9)) for i in range(length))
def rdoc(num_elements=1000):
"""Randomly generate an invalid HTML document."""
tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
elements = []
for i in range(num_elements):
choice = random.randint(0,3)
if choice == 0:
# New tag.
tag_name = random.choice(tag_names)
elements.append("<%s>" % tag_name)
elif choice == 1:
elements.append(rsentence(random.randint(1,4)))
elif choice == 2:
# Close a tag.
tag_name = random.choice(tag_names)
elements.append("</%s>" % tag_name)
return "<html>" + "\n".join(elements) + "</html>"
def benchmark_parsers(num_elements=100000):
"""Very basic head-to-head performance benchmark."""
print "Comparative parser benchmark on Beautiful Soup %s" % __version__
data = rdoc(num_elements)
print "Generated a large invalid HTML document (%d bytes)." % len(data)
for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
success = False
try:
a = time.time()
soup = BeautifulSoup(data, parser)
b = time.time()
success = True
except Exception, e:
print "%s could not parse the markup." % parser
traceback.print_exc()
if success:
print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)
from lxml import etree
a = time.time()
etree.HTML(data)
b = time.time()
print "Raw lxml parsed the markup in %.2fs." % (b-a)
import html5lib
parser = html5lib.HTMLParser()
a = time.time()
parser.parse(data)
b = time.time()
print "Raw html5lib parsed the markup in %.2fs." % (b-a)
def profile(num_elements=100000, parser="lxml"):
filehandle = tempfile.NamedTemporaryFile()
filename = filehandle.name
data = rdoc(num_elements)
vars = dict(bs4=bs4, data=data, parser=parser)
cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)
stats = pstats.Stats(filename)
# stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats('_html5lib|bs4', 50)
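
# Quick interactive sketch (assumes the optional parsers are installed):
#   from bs4.diagnose import diagnose, benchmark_parsers
#   diagnose("<p>an unclosed paragraph")
#   benchmark_parsers(10000)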
if __name__ == '__main__':
diagnose(sys.stdin.read())
|
nikolas/edx-platform | refs/heads/master | common/djangoapps/student/tests/test_reset_password.py | 54 | """
Test the various password reset flows
"""
import json
import re
import unittest
from django.core.cache import cache
from django.conf import settings
from django.test import TestCase
from django.test.client import RequestFactory
from django.contrib.auth.models import User
from django.contrib.auth.hashers import UNUSABLE_PASSWORD
from django.contrib.auth.tokens import default_token_generator
from django.utils.http import int_to_base36
from mock import Mock, patch
import ddt
from student.views import password_reset, password_reset_confirm_wrapper, SETTING_CHANGE_INITIATED
from student.tests.factories import UserFactory
from student.tests.test_email import mock_render_to_string
from util.testing import EventTestMixin
from test_microsite import fake_site_name
@ddt.ddt
class ResetPasswordTests(EventTestMixin, TestCase):
""" Tests that clicking reset password sends email, and doesn't activate the user
"""
request_factory = RequestFactory()
def setUp(self):
super(ResetPasswordTests, self).setUp('student.views.tracker')
self.user = UserFactory.create()
self.user.is_active = False
self.user.save()
self.token = default_token_generator.make_token(self.user)
self.uidb36 = int_to_base36(self.user.id)
self.user_bad_passwd = UserFactory.create()
self.user_bad_passwd.is_active = False
self.user_bad_passwd.password = UNUSABLE_PASSWORD
self.user_bad_passwd.save()
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_user_bad_password_reset(self):
"""Tests password reset behavior for user with password marked UNUSABLE_PASSWORD"""
bad_pwd_req = self.request_factory.post('/password_reset/', {'email': self.user_bad_passwd.email})
bad_pwd_resp = password_reset(bad_pwd_req)
# If they've got an unusable password, we return a successful response code
self.assertEquals(bad_pwd_resp.status_code, 200)
obj = json.loads(bad_pwd_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_nonexist_email_password_reset(self):
"""Now test the exception cases with of reset_password called with invalid email."""
bad_email_req = self.request_factory.post('/password_reset/', {'email': self.user.email + "makeItFail"})
bad_email_resp = password_reset(bad_email_req)
# Note: even if the email is bad, we return a successful response code
# This prevents someone potentially trying to "brute-force" find out which
# emails are and aren't registered with edX
self.assertEquals(bad_email_resp.status_code, 200)
obj = json.loads(bad_email_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_password_reset_ratelimited(self):
""" Try (and fail) resetting password 30 times in a row on an non-existant email address """
cache.clear()
for i in xrange(30):
good_req = self.request_factory.post('/password_reset/', {
'email': 'thisdoesnotexist{0}@foo.com'.format(i)
})
good_resp = password_reset(good_req)
self.assertEquals(good_resp.status_code, 200)
# then the rate limiter should kick in and give a HttpForbidden response
bad_req = self.request_factory.post('/password_reset/', {'email': '[email protected]'})
bad_resp = password_reset(bad_req)
self.assertEquals(bad_resp.status_code, 403)
self.assert_no_events_were_emitted()
cache.clear()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_reset_password_email(self, send_email):
"""Tests contents of reset password email, and that user is not active"""
good_req = self.request_factory.post('/password_reset/', {'email': self.user.email})
good_req.user = self.user
good_resp = password_reset(good_req)
self.assertEquals(good_resp.status_code, 200)
obj = json.loads(good_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
(subject, msg, from_addr, to_addrs) = send_email.call_args[0]
self.assertIn("Password reset", subject)
self.assertIn("You're receiving this e-mail because you requested a password reset", msg)
self.assertEquals(from_addr, settings.DEFAULT_FROM_EMAIL)
self.assertEquals(len(to_addrs), 1)
self.assertIn(self.user.email, to_addrs)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None,
)
#test that the user is not active
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
re.search(r'password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/', msg).groupdict()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@ddt.data((False, 'http://'), (True, 'https://'))
@ddt.unpack
def test_reset_password_email_https(self, is_secure, protocol, send_email):
"""
Tests that the right url protocol is included in the reset password link
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.is_secure = Mock(return_value=is_secure)
req.user = self.user
password_reset(req)
_, msg, _, _ = send_email.call_args[0]
expected_msg = "Please go to the following page and choose a new password:\n\n" + protocol
self.assertIn(expected_msg, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@ddt.data(('Crazy Awesome Site', 'Crazy Awesome Site'), (None, 'edX'))
@ddt.unpack
def test_reset_password_email_domain(self, domain_override, platform_name, send_email):
"""
Tests that the right url domain and platform name is included in
the reset password email
"""
with patch("django.conf.settings.PLATFORM_NAME", platform_name):
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.get_host = Mock(return_value=domain_override)
req.user = self.user
password_reset(req)
_, msg, _, _ = send_email.call_args[0]
reset_msg = "you requested a password reset for your user account at {}"
if domain_override:
reset_msg = reset_msg.format(domain_override)
else:
reset_msg = reset_msg.format(settings.SITE_NAME)
self.assertIn(reset_msg, msg)
sign_off = "The {} Team".format(platform_name)
self.assertIn(sign_off, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch("microsite_configuration.microsite.get_value", fake_site_name)
@patch('django.core.mail.send_mail')
def test_reset_password_email_microsite(self, send_email):
"""
Tests that the right url domain and platform name is included in
the reset password email
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.get_host = Mock(return_value=None)
req.user = self.user
password_reset(req)
_, msg, _, _ = send_email.call_args[0]
reset_msg = "you requested a password reset for your user account at openedx.localhost"
self.assertIn(reset_msg, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@patch('student.views.password_reset_confirm')
def test_reset_password_bad_token(self, reset_confirm):
"""Tests bad token and uidb36 in password reset"""
bad_reset_req = self.request_factory.get('/password_reset_confirm/NO-OP/')
password_reset_confirm_wrapper(bad_reset_req, 'NO', 'OP')
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb36'], 'NO')
self.assertEquals(confirm_kwargs['token'], 'OP')
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
@patch('student.views.password_reset_confirm')
def test_reset_password_good_token(self, reset_confirm):
"""Tests good token and uidb36 in password reset"""
good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))
password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb36'], self.uidb36)
self.assertEquals(confirm_kwargs['token'], self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
@patch('student.views.password_reset_confirm')
def test_reset_password_with_reused_password(self, reset_confirm):
"""Tests good token and uidb36 in password reset"""
good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))
password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb36'], self.uidb36)
self.assertEquals(confirm_kwargs['token'], self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
|
Outernet-Project/librarian-opener-html | refs/heads/master | setup.py | 1 | import os
from setuptools import setup, find_packages
import librarian_opener_html as pkg
def read(fname):
""" Return content of specified file """
return open(os.path.join(os.path.dirname(__file__), fname)).read()
VERSION = pkg.__version__
setup(
name='librarian-opener-html',
version=VERSION,
license='BSD',
packages=[pkg.__name__],
include_package_data=True,
long_description=read('README.rst'),
install_requires=[
'librarian_core',
'librarian_filemanager',
],
dependency_links=[
'git+ssh://[email protected]/Outernet-Project/librarian-core.git#egg=librarian_core-0.1',
'git+ssh://[email protected]/Outernet-Project/librarian-filemanager.git#egg=librarian_filemanager-0.1',
],
classifiers=[
'Development Status :: 3 - Alpha',
        'Topic :: Application',
'Framework :: Bottle',
'Environment :: Web Environment',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
wiki05/youtube-dl | refs/heads/master | youtube_dl/extractor/cliphunter.py | 113 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import determine_ext
_translation_table = {
'a': 'h', 'd': 'e', 'e': 'v', 'f': 'o', 'g': 'f', 'i': 'd', 'l': 'n',
'm': 'a', 'n': 'm', 'p': 'u', 'q': 't', 'r': 's', 'v': 'p', 'x': 'r',
'y': 'l', 'z': 'i',
'$': ':', '&': '.', '(': '=', '^': '&', '=': '/',
}
def _decode(s):
return ''.join(_translation_table.get(c, c) for c in s)
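# Illustrative example (not from the original source): the obfuscation is a
# simple per-character substitution, so _decode('aqqv$==') returns 'http://'.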
class CliphunterIE(InfoExtractor):
IE_NAME = 'cliphunter'
_VALID_URL = r'''(?x)http://(?:www\.)?cliphunter\.com/w/
(?P<id>[0-9]+)/
(?P<seo>.+?)(?:$|[#\?])
'''
_TEST = {
'url': 'http://www.cliphunter.com/w/1012420/Fun_Jynx_Maze_solo',
'md5': 'b7c9bbd4eb3a226ab91093714dcaa480',
'info_dict': {
'id': '1012420',
'ext': 'flv',
'title': 'Fun Jynx Maze solo',
'thumbnail': 're:^https?://.*\.jpg$',
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_title = self._search_regex(
r'mediaTitle = "([^"]+)"', webpage, 'title')
fmts = {}
for fmt in ('mp4', 'flv'):
fmt_list = self._parse_json(self._search_regex(
r'var %sjson\s*=\s*(\[.*?\]);' % fmt, webpage, '%s formats' % fmt), video_id)
for f in fmt_list:
fmts[f['fname']] = _decode(f['sUrl'])
qualities = self._parse_json(self._search_regex(
r'var player_btns\s*=\s*(.*?);\n', webpage, 'quality info'), video_id)
formats = []
for fname, url in fmts.items():
f = {
'url': url,
}
if fname in qualities:
qual = qualities[fname]
f.update({
'format_id': '%s_%sp' % (determine_ext(url), qual['h']),
'width': qual['w'],
'height': qual['h'],
'tbr': qual['br'],
})
formats.append(f)
self._sort_formats(formats)
thumbnail = self._search_regex(
r"var\s+mov_thumb\s*=\s*'([^']+)';",
webpage, 'thumbnail', fatal=False)
return {
'id': video_id,
'title': video_title,
'formats': formats,
'age_limit': self._rta_search(webpage),
'thumbnail': thumbnail,
}
|
aoakeson/home-assistant | refs/heads/dev | tests/components/media_player/test_demo.py | 1 | """The tests for the Demo Media player platform."""
import unittest
from unittest.mock import patch
import homeassistant.components.media_player as mp
from tests.common import get_test_home_assistant
entity_id = 'media_player.walkman'
class TestDemoMediaPlayer(unittest.TestCase):
"""Test the media_player module."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_volume_services(self):
"""Test the volume service."""
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
state = self.hass.states.get(entity_id)
assert 1.0 == state.attributes.get('volume_level')
mp.set_volume_level(self.hass, None, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 1.0 == state.attributes.get('volume_level')
mp.set_volume_level(self.hass, 0.5, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 0.5 == state.attributes.get('volume_level')
mp.volume_down(self.hass, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 0.4 == state.attributes.get('volume_level')
mp.volume_up(self.hass, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 0.5 == state.attributes.get('volume_level')
assert False is state.attributes.get('is_volume_muted')
mp.mute_volume(self.hass, None, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert False is state.attributes.get('is_volume_muted')
mp.mute_volume(self.hass, True, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert True is state.attributes.get('is_volume_muted')
def test_turning_off_and_on(self):
"""Test turn_on and turn_off."""
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
assert self.hass.states.is_state(entity_id, 'playing')
mp.turn_off(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'off')
assert not mp.is_on(self.hass, entity_id)
mp.turn_on(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'playing')
mp.toggle(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'off')
assert not mp.is_on(self.hass, entity_id)
def test_playing_pausing(self):
"""Test media_pause."""
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
assert self.hass.states.is_state(entity_id, 'playing')
mp.media_pause(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'paused')
mp.media_play_pause(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'playing')
mp.media_play_pause(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'paused')
mp.media_play(self.hass, entity_id)
self.hass.pool.block_till_done()
assert self.hass.states.is_state(entity_id, 'playing')
def test_prev_next_track(self):
"""Test media_next_track and media_previous_track ."""
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
state = self.hass.states.get(entity_id)
assert 1 == state.attributes.get('media_track')
assert 0 == (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
mp.media_next_track(self.hass, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 2 == state.attributes.get('media_track')
assert 0 < (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
mp.media_next_track(self.hass, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 3 == state.attributes.get('media_track')
assert 0 < (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
mp.media_previous_track(self.hass, entity_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(entity_id)
assert 2 == state.attributes.get('media_track')
assert 0 < (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
ent_id = 'media_player.lounge_room'
state = self.hass.states.get(ent_id)
assert 1 == state.attributes.get('media_episode')
assert 0 == (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
mp.media_next_track(self.hass, ent_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(ent_id)
assert 2 == state.attributes.get('media_episode')
assert 0 < (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
mp.media_previous_track(self.hass, ent_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(ent_id)
assert 1 == state.attributes.get('media_episode')
assert 0 == (mp.SUPPORT_PREVIOUS_TRACK &
state.attributes.get('supported_media_commands'))
@patch('homeassistant.components.media_player.demo.DemoYoutubePlayer.'
'media_seek')
def test_play_media(self, mock_seek):
"""Test play_media ."""
assert mp.setup(self.hass, {'media_player': {'platform': 'demo'}})
ent_id = 'media_player.living_room'
state = self.hass.states.get(ent_id)
assert 0 < (mp.SUPPORT_PLAY_MEDIA &
state.attributes.get('supported_media_commands'))
assert state.attributes.get('media_content_id') is not None
mp.play_media(self.hass, None, 'some_id', ent_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(ent_id)
assert 0 < (mp.SUPPORT_PLAY_MEDIA &
state.attributes.get('supported_media_commands'))
assert not 'some_id' == state.attributes.get('media_content_id')
mp.play_media(self.hass, 'youtube', 'some_id', ent_id)
self.hass.pool.block_till_done()
state = self.hass.states.get(ent_id)
assert 0 < (mp.SUPPORT_PLAY_MEDIA &
state.attributes.get('supported_media_commands'))
assert 'some_id' == state.attributes.get('media_content_id')
assert not mock_seek.called
mp.media_seek(self.hass, None, ent_id)
self.hass.pool.block_till_done()
assert not mock_seek.called
mp.media_seek(self.hass, 100, ent_id)
self.hass.pool.block_till_done()
assert mock_seek.called
|
zchking/odoo | refs/heads/8.0 | addons/mrp_operations/report/mrp_code_barcode.py | 381 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
class code_barcode(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(code_barcode, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
})
report_sxw.report_sxw('report.mrp.code.barcode', 'mrp_operations.operation.code', 'addons/mrp_operations/report/mrp_code_barcode.rml',parser=code_barcode,header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
XiaowenLin/cs598rk | refs/heads/master | reviews/reviewapp/migrations/__init__.py | 12133432 | |
rtts/letsy | refs/heads/master | django/balance/__init__.py | 12133432 | |
vasilenkomike/tarantool | refs/heads/master | test/xlog/misc.test.py | 9 | import os
import yaml
from os.path import abspath
# cleanup server.vardir
server.stop()
server.deploy()
lsn = int(yaml.load(server.admin("box.info.server.lsn", silent=True))[0])
server.stop()
data_path = os.path.join(server.vardir, server.name)
print """
# xlog file must exist after inserts.
"""
filename = str(lsn).zfill(20) + ".xlog"
wal = os.path.join(data_path, filename)
server.start()
server.admin("space = box.schema.space.create('tweedledum', { id = 0 })")
if os.access(wal, os.F_OK):
print ".xlog exists"
server.admin("index = space:create_index('primary', { type = 'hash' })")
server.stop()
lsn += 2
print """
# a new xlog must be opened after regular termination.
"""
filename = str(lsn).zfill(20) + ".xlog"
server.start()
wal = os.path.join(data_path, filename)
server.admin("box.space[0]:insert{3, 'third tuple'}")
if os.access(wal, os.F_OK):
print "a new .xlog exists"
server.stop()
if os.access(wal, os.F_OK):
print ".xlog stays around after sutdown"
lsn += 1
print """
# An xlog file with one record during recovery.
"""
server.start()
filename = str(lsn).zfill(20) + ".xlog"
wal = os.path.join(data_path, filename)
server.admin("box.space[0]:insert{4, 'fourth tuple'}")
server.admin("box.space[0]:insert{5, 'Unfinished record'}")
pid = int(yaml.load(server.admin("require('tarantool').pid()", silent=True))[0])
from signal import SIGKILL
if pid > 0:
os.kill(pid, SIGKILL)
server.stop()
if os.access(wal, os.F_OK):
print ".xlog exists after kill -9"
# Remove last byte from xlog
f = open(wal, "a")
size = f.tell()
f.truncate(size - 1)
f.close()
server.start()
if os.access(wal, os.F_OK):
print "corrupt .xlog exists after start"
server.stop()
lsn += 1
server.start()
orig_lsn = int(yaml.load(admin("box.info.server.lsn", silent=True))[0])
# create .snap.inprogress
admin("box.snapshot()")
admin("box.space._schema:insert({'test', 'test'})")
admin("box.snapshot()")
lsn = int(yaml.load(admin("box.info.server.lsn", silent=True))[0])
snapshot = str(lsn).zfill(20) + ".snap"
snapshot = os.path.join(data_path, snapshot)
server.stop()
os.rename(snapshot, snapshot + ".inprogress")
# remove .xlogs
for f in os.listdir(data_path):
if f.endswith(".xlog"):
os.remove(os.path.join(data_path, f))
# check that .snap.inprogress is ignored during scan
server.start()
lsn = int(yaml.load(admin("box.info.server.lsn", silent=True))[0])
if lsn == orig_lsn:
print ".snap.inprogress is ignored"
|
smartforceplus/SmartForceplus | refs/heads/master | addons/base_iban/__init__.py | 447 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base_iban
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
zhangguiyu/django-parler | refs/heads/master | parler/utils/conf.py | 8 | """
The configuration wrappers that are used for :ref:`PARLER_LANGUAGES`.
"""
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
from django.utils.translation import get_language
from parler.utils.i18n import is_supported_django_language
import warnings
def add_default_language_settings(languages_list, var_name='PARLER_LANGUAGES', **extra_defaults):
"""
Apply extra defaults to the language settings.
This function can also be used by other packages to
create their own variation of ``PARLER_LANGUAGES`` with extra fields.
For example::
from django.conf import settings
from parler import appsettings as parler_appsettings
# Create local names, which are based on the global parler settings
MYAPP_DEFAULT_LANGUAGE_CODE = getattr(settings, 'MYAPP_DEFAULT_LANGUAGE_CODE', parler_appsettings.PARLER_DEFAULT_LANGUAGE_CODE)
MYAPP_LANGUAGES = getattr(settings, 'MYAPP_LANGUAGES', parler_appsettings.PARLER_LANGUAGES)
# Apply the defaults to the languages
MYAPP_LANGUAGES = parler_appsettings.add_default_language_settings(MYAPP_LANGUAGES, 'MYAPP_LANGUAGES',
code=MYAPP_DEFAULT_LANGUAGE_CODE,
fallback=MYAPP_DEFAULT_LANGUAGE_CODE,
hide_untranslated=False
)
The returned object will be an :class:`~parler.utils.conf.LanguagesSetting` object,
which adds additional methods to the :class:`dict` object.
:param languages_list: The settings, in :ref:`PARLER_LANGUAGES` format.
:param var_name: The name of your variable, for debugging output.
:param extra_defaults: Any defaults to override in the ``languages_list['default']`` section, e.g. ``code``, ``fallback``, ``hide_untranslated``.
:return: The updated ``languages_list`` with all defaults applied to all sections.
:rtype: LanguagesSetting
"""
languages_list = LanguagesSetting(languages_list)
languages_list.setdefault('default', {})
defaults = languages_list['default']
defaults.setdefault('hide_untranslated', False) # Whether queries with .active_translations() may or may not return the fallback language.
if 'fallback' in defaults:
#warnings.warn("Please use 'fallbacks' instead of 'fallback' in the 'defaults' section of {0}".format(var_name), DeprecationWarning)
defaults['fallbacks'] = [defaults.pop('fallback')]
if 'fallback' in extra_defaults:
#warnings.warn("Please use 'fallbacks' instead of 'fallback' in parameters for {0} = add_default_language_settings(..)".format(var_name), DeprecationWarning)
extra_defaults['fallbacks'] = [extra_defaults.pop('fallback')]
defaults.update(extra_defaults) # Also allow to override code and fallback this way.
# This function previously existed in appsettings, where it could reference the defaults directly.
# However, this module is a more logical place for this function. To avoid circular import problems,
# the 'code' and 'fallback' parameters are always passed by the appsettings module.
# In case these are missing, default to the original behavior for backwards compatibility.
if 'code' not in defaults:
from parler import appsettings
defaults['code'] = appsettings.PARLER_DEFAULT_LANGUAGE_CODE
if 'fallbacks' not in defaults:
from parler import appsettings
        defaults['fallbacks'] = [appsettings.PARLER_DEFAULT_LANGUAGE_CODE]
if not is_supported_django_language(defaults['code']):
raise ImproperlyConfigured("The value for {0}['defaults']['code'] ('{1}') does not exist in LANGUAGES".format(var_name, defaults['code']))
for site_id, lang_choices in six.iteritems(languages_list):
if site_id == 'default':
continue
if not isinstance(lang_choices, (list, tuple)):
raise ImproperlyConfigured("{0}[{1}] should be a tuple of language choices!".format(var_name, site_id))
for i, choice in enumerate(lang_choices):
if not is_supported_django_language(choice['code']):
raise ImproperlyConfigured("{0}[{1}][{2}]['code'] does not exist in LANGUAGES".format(var_name, site_id, i))
# Copy all items from the defaults, so you can provide new fields too.
for key, value in six.iteritems(defaults):
choice.setdefault(key, value)
return languages_list
class LanguagesSetting(dict):
"""
This is the actual object type of the :ref:`PARLER_LANGUAGES` setting.
Besides the regular :class:`dict` behavior, it also adds some additional methods.
"""
def get_language(self, language_code, site_id=None):
"""
Return the language settings for the current site
This function can be used with other settings variables
to support modules which create their own variation of the ``PARLER_LANGUAGES`` setting.
For an example, see :func:`~parler.appsettings.add_default_language_settings`.
"""
if site_id is None:
site_id = getattr(settings, 'SITE_ID', None)
for lang_dict in self.get(site_id, ()):
if lang_dict['code'] == language_code:
return lang_dict
return self['default']
def get_active_choices(self, language_code=None, site_id=None):
"""
Find out which translations should be visible in the site.
It returns a list with either a single choice (the current language),
or a list with the current language + fallback language.
"""
if language_code is None:
language_code = get_language()
lang_dict = self.get_language(language_code, site_id=site_id)
if not lang_dict['hide_untranslated']:
return [language_code] + [lang for lang in lang_dict['fallbacks'] if lang != language_code]
else:
return [language_code]
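        # For example, assuming fallbacks=['en'] in the defaults:
        # get_active_choices('fr') returns ['fr', 'en'] when
        # hide_untranslated is False, and just ['fr'] when it is True.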
def get_fallback_languages(self, language_code=None, site_id=None):
"""
Find out what the fallback language is for a given language choice.
        .. versionadded:: 1.5
"""
choices = self.get_active_choices(language_code, site_id=site_id)
return choices[1:]
def get_fallback_language(self, language_code=None, site_id=None):
"""
Find out what the fallback language is for a given language choice.
.. deprecated:: 1.5
Use :func:`get_fallback_languages` instead.
"""
choices = self.get_active_choices(language_code, site_id=site_id)
if choices and len(choices) > 1:
# Still take the last, like previous code.
# With multiple fallback languages that means taking the base language.
# Hence, upgrade the code to use get_fallback_languages() instead.
return choices[-1]
else:
return None
def get_default_language(self):
"""
Return the default language.
"""
return self['default']['code']
def get_first_language(self, site_id=None):
"""
Return the first language for the current site.
This can be used for user interfaces, where the languages are displayed in tabs.
"""
if site_id is None:
site_id = getattr(settings, 'SITE_ID', None)
try:
return self[site_id][0]['code']
except (KeyError, IndexError):
# No configuration, always fallback to default language.
# This is essentially a non-multilingual configuration.
return self['default']['code']
|
gic888/MIEN | refs/heads/master | spatial/stacktool.py | 1 | #!/usr/bin/env python
# encoding: utf-8
#Created by on 2008-06-23.
# Copyright (C) 2008 Graham I Cummins
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
#
import cPickle, re
from mien.image.arrayops import image_to_array, array_to_image
from mien.wx.dialogs import wx, makeEntryWidget
from mien.wx.graphs.graphGL import *
class AlignmentTool(wx.Dialog):
def __init__(self, master):
wx.Dialog.__init__(self, master)
self.st=master
self.imcenter=None
self.SetTitle("Image Stack Alignment")
sizer = wx.BoxSizer(wx.VERTICAL)
tw = self.GetTextExtent("W")[0]*30
#anchor, pixelwidth, pixelheight, stackspacing, rotate
self.vals={}
for foo in ['Anchor X', 'Anchor Y', 'Anchor Z', 'PixelWidth', 'PixelHeight', 'StackSpacing', 'Rotation (Deg CCW)']:
d={'Name':foo}
if foo =='Anchor X':
d['Value']=float(self.st.stack.attrib('SpatialAnchor')[0])
elif foo =='Anchor Y':
d['Value']=float(self.st.stack.attrib('SpatialAnchor')[1])
elif foo =='Anchor Z':
d['Value']=float(self.st.stack.attrib('SpatialAnchor')[2])
elif foo.startswith('Rotation'):
d['Value']=self.getRotation()
else:
d['Value']=float(self.st.stack.attrib(foo))
box = makeEntryWidget(self, d)
self.vals[foo]=d
sizer.Add(box, 0, wx.GROW|wx.ALIGN_CENTER|wx.ALL, 5)
#apply
btn = wx.Button(self, -1, " Apply New Parameters ")
wx.EVT_BUTTON(self, btn.GetId(), self.apply)
btn.SetDefault()
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
#Reset center
btn = wx.Button(self, -1, " Set Center ")
wx.EVT_BUTTON(self, btn.GetId(), self.setcent)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
#quit
btn = wx.Button(self, -1, " Close ")
wx.EVT_BUTTON(self, btn.GetId(), lambda x:self.Destroy())
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
self.SetSizer(sizer)
self.SetAutoLayout(True)
sizer.Fit(self)
def getRotation(self):
w=self.st.getSpatial()[1,:]
a=correctedArctan(w[0], w[1], 'deg')
return a
def setcent(self, event):
p=self.st.cv
g=p.graph
if not dot(g.forward, [0,0,-1])>.99:
p.report("You can only pick a center in plan view (e.g. z axis=[0,0,1])")
return
ulg,wg,hg=g.frontPlane()
a=self.st.getSpatial()
ul, down, w, h= a[0,:], a[3,:], a[1,:], a[2,:]
h=-h
size=self.st.stack.shape()
frame=0
while ulg[2]<ul[2]:
ul+=down
frame+=1
if frame>=size[3]:
p.report("No frames in view.")
return
cp=ulg+wg/2.0+hg/2.0
dfc=cp-ul
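        # Project the offset from the frame's upper-left corner onto the image
        # width/height axes, giving fractional (0..1) in-frame coordinates.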
dfcx=dot(dfc, w)/sum(w**2)
dfcy=dot(dfc, h)/sum(h**2)
if max(dfcy, dfcx)>1.0 or min(dfcy, dfcx)<0.0:
p.report("View center isn't in a image")
return
w=round(size[0]*dfcx)
h=round(size[1]*dfcy)
self.imcenter=array([w, h, frame]).astype(int32)
p.report('setting center to voxel %i %i %i' % (self.imcenter[0], self.imcenter[1],self.imcenter[2]))
self.apply(None)
def apply(self, event):
if self.Validate() and self.TransferDataFromWindow():
ul=array( (self.vals['Anchor X']['Value'],
self.vals['Anchor Y']['Value'],
self.vals['Anchor Z']['Value'])
)
rot=self.vals['Rotation (Deg CCW)']['Value']
pw=self.vals['PixelWidth']['Value']
ph=self.vals['PixelHeight']['Value']
ss=self.vals['StackSpacing']['Value']
rot= rot % 360
rr=rot*pi/180
x=array((cos(rr), sin(rr), 0))
z=array((0.0, 0.0, -1.0))
y=cross(x, z)
size=self.st.stack.shape()[:2]
w=x*size[0]*pw
h=y*size[1]*ph
            if self.imcenter is not None:
offset=array([pw*self.imcenter[0], -ph*self.imcenter[1], ss*self.imcenter[2]])
if rot:
offset=rotate3D(array([offset]), (0,0,rot))[0,:]
#ul=2*self.imcenter-offset
ul-=offset
down=z*ss
dat=vstack([ul, w, h, down])
self.st.setSpatial(dat)
class StackTool(wx.Dialog):
def __init__(self, master, stack):
wx.Dialog.__init__(self, master)
self.cv=master
self.stack=stack
self.condition(stack)
self.showalllines=0
self.frame=-1
self.SetTitle("Image Stack Tool")
sizer = wx.BoxSizer(wx.VERTICAL)
btn = wx.Button(self, -1, " Align Stack ")
wx.EVT_BUTTON(self, btn.GetId(), self.align)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
btn = wx.Button(self, -1, " Edit Stack ")
wx.EVT_BUTTON(self, btn.GetId(), self.editor)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
#nav
box = wx.BoxSizer(wx.HORIZONTAL)
btn = wx.Button(self, -1, " prev ")
wx.EVT_BUTTON(self, btn.GetId(), self.prev)
box.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
btn = wx.Button(self, -1, " next ")
wx.EVT_BUTTON(self, btn.GetId(), self.next)
box.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
sizer.Add(box, 0, wx.ALIGN_CENTER|wx.ALL, 5)
self.atZlevel= wx.TextCtrl(self, -1, "0", style=wx.TE_PROCESS_ENTER, size=(12,-1))
wx.EVT_TEXT_ENTER(self, self.atZlevel.GetId(), self.setZlevel)
sizer.Add(self.atZlevel, 1, wx.GROW|wx.ALIGN_CENTRE|wx.ALL, 5)
#show all lines
btn = wx.Button(self, -1, " Toggle Show all Line Fiducials ")
wx.EVT_BUTTON(self, btn.GetId(), self.toglines)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
btn = wx.Button(self, -1, " Toggle Stack Transparency ")
wx.EVT_BUTTON(self, btn.GetId(), self.togtrans)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
#animate
btn = wx.Button(self, -1, " Animate Stack ")
wx.EVT_BUTTON(self, btn.GetId(), self.animate)
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
btn = wx.Button(self, -1, " Close ")
wx.EVT_BUTTON(self, btn.GetId(), lambda x:self.Destroy())
sizer.Add(btn, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
self.SetSizer(sizer)
self.SetAutoLayout(True)
sizer.Fit(self)
self.Show(True)
def editor(self, event):
from mien.image.viewer import ImageViewer
i=ImageViewer(self.cv)
i.Show(True)
i.select(self.stack)
def report(self, s):
self.cv.report(s)
def condition(self, stack):
if not stack.attrib('SpatialAnchor'):
stack.setAttrib('SpatialAnchor', (0.0,0.0,0.0))
if not stack.attrib('SpatialVertical'):
stack.setAttrib('SpatialVertical', (0.0,1.0,0.0))
if not stack.attrib('SpatialDepth'):
stack.setAttrib('SpatialDepth', (0.0,0.0,-1.0))
if not stack.attrib('PixelWidth'):
stack.setAttrib('PixelWidth', 1.0)
if not stack.attrib('PixelHeight'):
stack.setAttrib('PixelHeight', stack.attrib('PixelWidth'))
if not stack.attrib('StackSpacing'):
stack.setAttrib('StackSpacing', 1.0)
pn= self.cv.getPlotName(stack)
if not pn:
pn=self.cv.graph.plotXML(stack)
self.cv.graph.OnDraw()
self.plotname=pn
def align(self, event):
foo=AlignmentTool(self)
foo.Show(True)
def prev(self, event):
self.frame-=1
self.displayzlevel(self.frame)
def next(self, event):
self.frame+=1
self.displayzlevel(self.frame)
def setZlevel(self, event):
l=int(self.atZlevel.GetValue())
self.displayzlevel(l)
def getSpatial(self):
ul=array(self.stack.attrib('SpatialAnchor'))
y=array(self.stack.attrib('SpatialVertical'))
z=array(self.stack.attrib('SpatialDepth'))
x=cross(z, y)
pw=self.stack.attrib('PixelWidth')
ph=self.stack.attrib('PixelHeight')
pd=self.stack.attrib('StackSpacing')
size=self.stack.getData().shape[:2]
down=z*pd
w=pw*x*size[0]
h=ph*y*size[1]
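        # Rows of dat: [0] upper-left anchor, [1] full width vector,
        # [2] full height vector, [3] per-frame depth step.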
dat=vstack([ul, w, h, down])
return dat
def setSpatial(self, dat, draw=True):
self.stack.setAttrib('SpatialAnchor', tuple(dat[0,:]))
th=sqrt((dat[2,:]**2).sum())
vert=dat[2,:]/th
self.stack.setAttrib('SpatialVertical', tuple(vert))
ss=sqrt((dat[3,:]**2).sum())
dep=dat[3,:]/ss
self.stack.setAttrib('SpatialDepth', tuple(dep))
tw=sqrt((dat[1,:]**2).sum())
size=self.stack.getData().shape[:2]
self.stack.setAttrib('PixelWidth', tw/size[0])
self.stack.setAttrib('PixelHeight', th/size[1])
self.stack.setAttrib('StackSpacing', ss)
self.cv.graph.plots[self.plotname]['data']=dat
self.cv.graph.recalc(self.plotname)
if draw:
self.cv.graph.OnDraw()
def displayzlevel(self, z):
self.atZlevel.SetValue(str(z))
self.frame = z
a=self.getSpatial()
ul, down, w, h= a[0,:], a[3,:], a[1,:], a[2,:]
cp=ul+w/2+h/2
cp=cp+z*down+.0001*down
self.cv.graph.viewpoint[2]=cp[2]
self.cv.graph.forward=array([0.0,0,-1])
self.cv.graph.OnDraw()
def toglines(self, event):
g=self.cv.graph
if self.showalllines:
self.showalllines=False
for p in g.plots.keys():
pl=g.plots[p]
if pl['style']=='contour':
pl['lineprojection']=0
g.recalc(p)
else:
self.showalllines=True
a=self.getSpatial()
av=a[0,1]
mv=av
for p in g.plots.keys():
pl=g.plots[p]
if pl['style']=='contour':
pl['lineprojection']=1
g.recalc(p)
mv=av-mv
g.OnDraw()
def togtrans(self, event):
if self.stack.attrib('transparent'):
self.stack.setAttrib('transparent', '')
self.cv.graph.plots[self.plotname]['transparent']=False
else:
self.stack.setAttrib('transparent', 1)
self.cv.graph.plots[self.plotname]['transparent']=True
self.cv.graph.recalc(self.plotname)
self.cv.graph.OnDraw()
def animate(self, event):
dir='StackAnimation'
if os.path.isdir(dir):
os.system("rm -rf %s" % dir)
os.mkdir(dir)
for i in range(self.stack.shape()[3]):
self.displayzlevel(i)
g=self.cv.graph
fname=os.path.join(dir, "frame%05i.bmp" % i)
g.screenShot(fname=fname)
print fname
self.cv.report("Saved Images")
def launchStackTool(cv):
stacks = cv.document.getElements('Data', {'SampleType':'image'})
if not stacks:
cv.report("There are no image stacks in this document. Load some images using the normal MIEN file load or append functions.")
return
if len(stacks)==1:
StackTool(cv, stacks[0])
return
sd={}
for i in stacks:
si="%s %s" % (i.upath(), str(i.shape()))
sd[si]=i
d=cv.askParam([{'Name':"Which Stack?", "Type":"List", "Value":sd.keys()}])
if not d:
return
StackTool(cv, sd[d[0]])
|
kapilrastogi/Impala | refs/heads/cdh5-trunk | thirdparty/hive-1.1.0-cdh5.8.0-SNAPSHOT/lib/py/thrift/protocol/__init__.py | 83 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
__all__ = ['TProtocol', 'TBinaryProtocol', 'fastbinary']
|
gonzafirewall/kivy | refs/heads/master | kivy/tests/test_uix_widget.py | 59 | from kivy.tests.common import GraphicUnitTest
class UIXWidgetTestCase(GraphicUnitTest):
def test_default_widgets(self):
from kivy.uix.button import Button
from kivy.uix.slider import Slider
r = self.render
r(Button())
r(Slider())
def test_button_properties(self):
from kivy.uix.button import Button
r = self.render
# test label attribute inside button
r(Button(text='Hello world'))
r(Button(text='Multiline\ntext\nbutton'))
r(Button(text='Hello world', font_size=42))
r(Button(text='This is my first line\nSecond line', halign='center'))
def test_slider_properties(self):
from kivy.uix.slider import Slider
r = self.render
r(Slider(value=25))
r(Slider(value=50))
r(Slider(value=100))
r(Slider(min=-100, max=100, value=0))
r(Slider(orientation='vertical', value=25))
r(Slider(orientation='vertical', value=50))
r(Slider(orientation='vertical', value=100))
r(Slider(orientation='vertical', min=-100, max=100, value=0))
def test_image_properties(self):
from kivy.uix.image import Image
from os.path import dirname, join
r = self.render
filename = join(dirname(__file__), 'test_button.png')
r(Image(source=filename))
def test_add_widget_index_0(self):
from kivy.uix.widget import Widget
from kivy.uix.button import Button
r = self.render
root = Widget()
a = Button(text='Hello')
b = Button(text='World', pos=(50, 10))
c = Button(text='Kivy', pos=(10, 50))
root.add_widget(a)
root.add_widget(b)
root.add_widget(c, 0)
r(root)
def test_add_widget_index_1(self):
from kivy.uix.widget import Widget
from kivy.uix.button import Button
r = self.render
root = Widget()
a = Button(text='Hello')
b = Button(text='World', pos=(50, 10))
c = Button(text='Kivy', pos=(10, 50))
root.add_widget(a)
root.add_widget(b)
root.add_widget(c, 1)
r(root)
def test_add_widget_index_2(self):
from kivy.uix.widget import Widget
from kivy.uix.button import Button
r = self.render
root = Widget()
a = Button(text='Hello')
b = Button(text='World', pos=(50, 10))
c = Button(text='Kivy', pos=(10, 50))
root.add_widget(a)
root.add_widget(b)
root.add_widget(c, 2)
r(root)
def test_widget_root_from_code_with_kv(self):
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.properties import StringProperty
from kivy.uix.floatlayout import FloatLayout
Builder.load_string("""
<MyWidget>:
Label:
text: root.title
<BaseWidget>:
CallerWidget:
""")
class CallerWidget(FloatLayout):
def __init__(self, **kwargs):
super(CallerWidget, self).__init__(**kwargs)
self.add_widget(MyWidget(title="Hello World"))
class NestedWidget(FloatLayout):
title = StringProperty('aa')
class MyWidget(NestedWidget):
pass
class BaseWidget(FloatLayout):
pass
Factory.register('MyWidget', cls=MyWidget)
Factory.register('CallerWidget', cls=CallerWidget)
r = self.render
root = BaseWidget()
r(root)
'''
def test_default_label(self):
from kivy.uix.label import Label
self.render(Label())
def test_button_state_down(self):
from kivy.uix.button import Button
self.render(Button(state='down'))
def test_label_text(self):
from kivy.uix.label import Label
self.render(Label(text='Hello world'))
def test_label_font_size(self):
from kivy.uix.label import Label
self.render(Label(text='Hello world', font_size=16))
'''
|
hendrycks/fooling | refs/heads/master | tiny-imagenet/cs231n/layers.py | 1 | import numpy as np
def affine_forward(x, w, b):
"""
Computes the forward pass for an affine (fully-connected) layer.
The input x has shape (N, d_1, ..., d_k) where x[i] is the ith input.
We multiply this against a weight matrix of shape (D, M) where
D = \prod_i d_i
Inputs:
x - Input data, of shape (N, d_1, ..., d_k)
w - Weights, of shape (D, M)
b - Biases, of shape (M,)
Returns a tuple of:
- out: output, of shape (N, M)
- cache: (x, w, b)
"""
out = x.reshape(x.shape[0], -1).dot(w) + b
cache = (x, w, b)
return out, cache
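# Shape sketch (illustrative, not from the original assignment): with
# x of shape (2, 4, 4), w of shape (16, 3) and b of shape (3,), x is
# flattened to (2, 16) and `out` has shape (2, 3).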
def affine_backward(dout, cache):
"""
Computes the backward pass for an affine layer.
Inputs:
- dout: Upstream derivative, of shape (N, M)
- cache: Tuple of:
- x: Input data, of shape (N, d_1, ... d_k)
- w: Weights, of shape (D, M)
Returns a tuple of:
- dx: Gradient with respect to x, of shape (N, d1, ..., d_k)
- dw: Gradient with respect to w, of shape (D, M)
- db: Gradient with respect to b, of shape (M,)
"""
x, w, b = cache
dx = dout.dot(w.T).reshape(x.shape)
dw = x.reshape(x.shape[0], -1).T.dot(dout)
db = np.sum(dout, axis=0)
return dx, dw, db
def relu_forward(x):
"""
Computes the forward pass for a layer of rectified linear units (ReLUs).
Input:
- x: Inputs, of any shape
Returns a tuple of:
- out: Output, of the same shape as x
- cache: x
"""
out = np.maximum(0, x)
cache = x
return out, cache
def relu_backward(dout, cache):
"""
Computes the backward pass for a layer of rectified linear units (ReLUs).
Input:
- dout: Upstream derivatives, of any shape
- cache: Input x, of same shape as dout
Returns:
- dx: Gradient with respect to x
"""
x = cache
dx = np.where(x > 0, dout, 0)
return dx
def dropout_forward(x, dropout_param):
"""
Performs the forward pass for (inverted) dropout.
Inputs:
- x: Input data, of any shape
- dropout_param: A dictionary with the following keys:
- p: Dropout parameter. We keep each neuron output with probability p.
- mode: 'test' or 'train'. If the mode is train, then perform dropout;
if the mode is test, then just return the input.
- seed: Seed for the random number generator. Passing seed makes this
function deterministic, which is needed for gradient checking but not in
real networks.
Outputs:
- out: Array of the same shape as x.
- cache: A tuple (dropout_param, mask). In training mode, mask is the dropout
mask that was used to multiply the input; in test mode, mask is None.
"""
p, mode = dropout_param['p'], dropout_param['mode']
if 'seed' in dropout_param:
np.random.seed(dropout_param['seed'])
mask = None
out = None
if mode == 'train':
###########################################################################
# TODO: Implement the training phase forward pass for inverted dropout. #
# Store the dropout mask in the mask variable. #
###########################################################################
    # Keep each unit with probability p, then scale by 1/p so that expected
    # activations match the test-time pass-through (inverted dropout).
    mask = (np.random.rand(*x.shape) < p) / p
    out = x * mask
###########################################################################
# END OF YOUR CODE #
###########################################################################
elif mode == 'test':
###########################################################################
# TODO: Implement the test phase forward pass for inverted dropout. #
###########################################################################
out = x
###########################################################################
# END OF YOUR CODE #
###########################################################################
cache = (dropout_param, mask)
out = out.astype(x.dtype, copy=False)
return out, cache
def dropout_backward(dout, cache):
"""
Perform the backward pass for (inverted) dropout.
Inputs:
- dout: Upstream derivatives, of any shape
- cache: (dropout_param, mask) from dropout_forward.
"""
dropout_param, mask = cache
mode = dropout_param['mode']
if mode == 'train':
###########################################################################
    # TODO: Implement the training phase backward pass for inverted dropout. #
###########################################################################
dx = dout * mask
###########################################################################
# END OF YOUR CODE #
###########################################################################
elif mode == 'test':
dx = dout
return dx
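# Minimal sanity check for the pair above (a sketch, not part of the original
# file): with inverted dropout, E[out] == x, so the mean is preserved:
#
#   x = np.ones((500, 500))
#   out, cache = dropout_forward(x, {'p': 0.5, 'mode': 'train', 'seed': 0})
#   assert abs(out.mean() - 1.0) < 0.05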
def svm_loss(x, y):
"""
Computes the loss and gradient using for multiclass SVM classification.
Inputs:
- x: Input data, of shape (N, C) where x[i, j] is the score for the jth class
for the ith input.
- y: Vector of labels, of shape (N,) where y[i] is the label for x[i] and
0 <= y[i] < C
Returns a tuple of:
- loss: Scalar giving the loss
- dx: Gradient of the loss with respect to x
"""
N = x.shape[0]
correct_class_scores = x[np.arange(N), y]
margins = np.maximum(0, x - correct_class_scores[:, np.newaxis] + 1.0)
margins[np.arange(N), y] = 0
loss = np.sum(margins) / N
num_pos = np.sum(margins > 0, axis=1)
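  # Gradient of the hinge loss: +1 flows to every class that violates the
  # margin, and the correct class receives minus the number of violations.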
dx = np.zeros_like(x)
dx[margins > 0] = 1
dx[np.arange(N), y] -= num_pos
dx /= N
return loss, dx
def softmax_loss(x, y):
"""
Computes the loss and gradient for softmax classification.
Inputs:
- x: Input data, of shape (N, C) where x[i, j] is the score for the jth class
for the ith input.
- y: Vector of labels, of shape (N,) where y[i] is the label for x[i] and
0 <= y[i] < C
Returns a tuple of:
- loss: Scalar giving the loss
- dx: Gradient of the loss with respect to x
"""
probs = np.exp(x - np.max(x, axis=1, keepdims=True))
probs /= np.sum(probs, axis=1, keepdims=True)
N = x.shape[0]
loss = -np.sum(np.log(probs[np.arange(N), y])) / N
dx = probs.copy()
dx[np.arange(N), y] -= 1
dx /= N
return loss, dx
|
yunity/foodsaving-backend | refs/heads/master | karrot/places/migrations/0026_auto_20171027_1232.py | 2 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-27 12:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('places', '0025_auto_20170908_1057'),
]
operations = [
migrations.AddField(
model_name='place',
name='status',
field=models.CharField(default='created', max_length=20),
),
]
|
mKeRix/home-assistant | refs/heads/dev | homeassistant/components/demo/config_flow.py | 3 | """Config flow to configure demo component."""
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
# pylint: disable=unused-import
from . import DOMAIN
CONF_STRING = "string"
CONF_BOOLEAN = "bool"
CONF_INT = "int"
CONF_SELECT = "select"
CONF_MULTISELECT = "multi"
class DemoConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Demo configuration flow."""
VERSION = 1
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_import(self, import_info):
"""Set the config entry up from yaml."""
return self.async_create_entry(title="Demo", data={})
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
self.options = dict(config_entry.options)
async def async_step_init(self, user_input=None):
"""Manage the options."""
return await self.async_step_options_1()
async def async_step_options_1(self, user_input=None):
"""Manage the options."""
if user_input is not None:
self.options.update(user_input)
return await self.async_step_options_2()
return self.async_show_form(
step_id="options_1",
data_schema=vol.Schema(
{
vol.Required("constant"): "Constant Value",
vol.Optional(
CONF_BOOLEAN,
default=self.config_entry.options.get(CONF_BOOLEAN, False),
): bool,
vol.Optional(
CONF_INT, default=self.config_entry.options.get(CONF_INT, 10),
): int,
}
),
)
async def async_step_options_2(self, user_input=None):
"""Manage the options 2."""
if user_input is not None:
self.options.update(user_input)
return await self._update_options()
return self.async_show_form(
step_id="options_2",
data_schema=vol.Schema(
{
vol.Optional(
CONF_STRING,
default=self.config_entry.options.get(CONF_STRING, "Default",),
): str,
vol.Optional(
CONF_SELECT,
default=self.config_entry.options.get(CONF_SELECT, "default"),
): vol.In(["default", "other"]),
vol.Optional(
CONF_MULTISELECT,
default=self.config_entry.options.get(
CONF_MULTISELECT, ["default"]
),
): cv.multi_select({"default": "Default", "other": "Other"}),
}
),
)
async def _update_options(self):
"""Update config entry options."""
return self.async_create_entry(title="", data=self.options)
|
daanwierstra/pybrain | refs/heads/master | pybrain/rl/learners/discrete/sarsa.py | 1 | __author__ = 'Thomas Rueckstiess, [email protected]'
from pybrain.rl.learners.rllearner import RLLearner
class SARSA(RLLearner):
def __init__(self, nActions):
self.alpha = 0.5
self.gamma = 0.99
self.laststate = None
self.lastaction = None
self.nActions = nActions
def learn(self):
""" learn on the current dataset, for a single step. """
""" TODO: also learn on episodic tasks (sum over whole sequence) """
state, action, reward = self.ds.getSample()
state = int(state)
action = int(action)
# first learning call has no last state: skip
        if self.laststate is None:
self.lastaction = action
self.laststate = state
return
qvalue = self.module.getValue(self.laststate, self.lastaction)
qnext = self.module.getValue(state, action)
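        # On-policy TD (SARSA) update:
        #   Q(s,a) <- Q(s,a) + alpha * (r + gamma * Q(s',a') - Q(s,a))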
self.module.updateValue(self.laststate, self.lastaction, qvalue + self.alpha * (reward + self.gamma * qnext - qvalue))
# move state to oldstate
self.laststate = state
self.lastaction = action |
dwaynebailey/pootle | refs/heads/master | tests/commands/update_tmserver.py | 3 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
import sys
import pytest
from django.core.management import call_command
from django.core.management.base import CommandError
@pytest.mark.cmd
@pytest.mark.django_db
def test_update_tmserver_nosetting(capfd, po_directory, tp0, settings):
"""We need configured TM for anything to work"""
settings.POOTLE_TM_SERVER = None
with pytest.raises(CommandError) as e:
call_command('update_tmserver')
assert "POOTLE_TM_SERVER setting is missing." in str(e)
@pytest.mark.cmd
@pytest.mark.django_db
def __test_update_tmserver_noargs(capfd, tp0, settings):
"""Load TM from the database"""
from pootle_store.models import Unit
units_qs = (
Unit.objects
.exclude(target_f__isnull=True)
.exclude(target_f__exact=''))
settings.POOTLE_TM_SERVER = {
'local': {
'ENGINE': 'pootle.core.search.backends.ElasticSearchBackend',
'HOST': 'elasticsearch',
'PORT': 9200,
'INDEX_NAME': 'translations',
}
}
call_command('update_tmserver')
out, err = capfd.readouterr()
assert "Last indexed revision = -1" in out
assert ("%d translations to index" % units_qs.count()) in out
@pytest.mark.cmd
@pytest.mark.django_db
def test_update_tmserver_bad_tm(capfd, settings):
"""Non-existant TM in the server"""
settings.POOTLE_TM_SERVER = {
'local': {
'ENGINE': 'pootle.core.search.backends.ElasticSearchBackend',
'HOST': 'elasticsearch',
'PORT': 9200,
'INDEX_NAME': 'translations',
}
}
with pytest.raises(CommandError) as e:
call_command('update_tmserver', '--tm=i_dont_exist')
assert "Translation Memory 'i_dont_exist' is not defined" in str(e)
@pytest.mark.cmd
@pytest.mark.django_db
def test_update_tmserver_files_no_displayname(capfd, settings, tmpdir):
"""File based TM needs a display-name"""
settings.POOTLE_TM_SERVER = {
'external': {
'ENGINE': 'pootle.core.search.backends.ElasticSearchBackend',
'HOST': 'elasticsearch',
'PORT': 9200,
'INDEX_NAME': 'translations-external',
}
}
with pytest.raises(CommandError) as e:
call_command('update_tmserver', '--tm=external', 'fake_file.po')
assert "--display-name" in str(e)
@pytest.mark.cmd
@pytest.mark.django_db
@pytest.mark.skipif(sys.platform == 'win32',
reason="No Elasticsearch in Windows testing")
def test_update_tmserver_files(capfd, settings, tmpdir):
"""Load TM from files"""
settings.POOTLE_TM_SERVER = {
'external': {
'ENGINE': 'pootle.core.search.backends.ElasticSearchBackend',
'HOST': 'elasticsearch',
'PORT': 9200,
'INDEX_NAME': 'translations-external',
}
}
p = tmpdir.mkdir("tmserver_files").join("tutorial.po")
p.write("""msgid "rest"
msgstr "test"
""")
    # First try without a --target-language; the minimal PO file above has no
    # headers from which the target language could be determined
with pytest.raises(CommandError) as e:
call_command('update_tmserver', '--tm=external', '--display-name=Test',
os.path.join(p.dirname, p.basename))
assert "Unable to determine target language" in str(e)
# Now set the --target-language
call_command('update_tmserver', '--tm=external', '--display-name=Test',
'--target-language=af', os.path.join(p.dirname, p.basename))
out, err = capfd.readouterr()
assert "1 translations to index" in out
|
tximikel/kuma | refs/heads/master | vendor/packages/translate/lang/identify.py | 25 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009 Zuza Software Foundation
#
# This file is part of translate.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
This module contains functions for identifying languages based on language
models.
"""
from os import extsep, path
from translate.lang.ngram import NGram
from translate.misc.file_discovery import get_abs_data_filename
from translate.storage.base import TranslationStore
class LanguageIdentifier(object):
MODEL_DIR = get_abs_data_filename('langmodels')
"""The directory containing the ngram language model files."""
CONF_FILE = 'fpdb.conf'
"""
The name of the file that contains language name-code pairs
(relative to ``MODEL_DIR``).
"""
def __init__(self, model_dir=None, conf_file=None):
if model_dir is None:
model_dir = self.MODEL_DIR
if not path.isdir(model_dir):
raise ValueError('Directory does not exist: %s' % (model_dir))
if conf_file is None:
conf_file = self.CONF_FILE
conf_file = path.abspath(path.join(model_dir, conf_file))
if not path.isfile(conf_file):
raise ValueError('File does not exist: %s' % (conf_file))
self._lang_codes = {}
self._load_config(conf_file)
self.ngram = NGram(model_dir)
def _load_config(self, conf_file):
"""Load the mapping of language names to language codes as given in the
configuration file."""
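        # Each non-comment line maps a model file name to a language code,
        # e.g. "english.lm en"; directory prefixes, file extensions and a
        # trailing "-utf8" on the code are stripped below.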
lines = open(conf_file).read().splitlines()
for line in lines:
parts = line.split()
if not parts or line.startswith('#'):
continue # Skip comment- and empty lines
lname, lcode = parts[0], parts[1]
# Make sure lname is not prefixed by directory names
lname = path.split(lname)[-1]
if extsep in lname:
                lname = lname[:lname.rindex(extsep)]  # Remove extension if it has one
# Remove trailing '[_-]-utf8' from code
if lcode.endswith('-utf8'):
lcode = lcode[:-len('-utf8')]
if lcode.endswith('-') or lcode.endswith('_'):
lcode = lcode[:-1]
self._lang_codes[lname] = lcode
def identify_lang(self, text):
"""Identify the language of the text in the given string."""
if not text:
return None
result = self.ngram.classify(text)
if result in self._lang_codes:
result = self._lang_codes[result]
return result
def identify_source_lang(self, instore):
"""Identify the source language of the given translation store or
units.
:type instore: ``TranslationStore`` or list or tuple of
``TranslationUnit``s.
:param instore: The translation store to extract source text from.
:returns: The identified language's code or ``None`` if the language
could not be identified."""
if not isinstance(instore, (TranslationStore, list, tuple)):
return None
text = u' '.join(unit.source for unit in instore[:50] if unit.istranslatable() and unit.source)
if not text:
return None
return self.identify_lang(text)
def identify_target_lang(self, instore):
"""Identify the target language of the given translation store or
units.
:type instore: ``TranslationStore`` or list or tuple of
``TranslationUnit``s.
:param instore: The translation store to extract target text from.
:returns: The identified language's code or ``None`` if the language
could not be identified."""
if not isinstance(instore, (TranslationStore, list, tuple)):
return None
text = u' '.join(unit.target for unit in instore[:200] if unit.istranslatable() and unit.target)
if not text:
return None
return self.identify_lang(text)
if __name__ == "__main__":
from sys import argv
script_dir = path.abspath(path.dirname(argv[0]))
identifier = LanguageIdentifier()
import locale
encoding = locale.getpreferredencoding()
    text = open(argv[1]).read().decode(encoding)
print("Language detected:", identifier.identify_lang(text))
|
Chedi/airflow | refs/heads/master | airflow/operators/mysql_operator.py | 7 | import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
"""
Executes sql code in a specific MySQL database
:param mysql_conn_id: reference to a specific mysql database
:type mysql_conn_id: string
:param sql: the sql code to be executed
:type sql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
    Template references are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql, mysql_conn_id='mysql_default', parameters=None,
autocommit=False, *args, **kwargs):
super(MySqlOperator, self).__init__(*args, **kwargs)
self.mysql_conn_id = mysql_conn_id
self.sql = sql
self.autocommit = autocommit
self.parameters = parameters
def execute(self, context):
logging.info('Executing: ' + str(self.sql))
hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
hook.run(
self.sql,
autocommit=self.autocommit,
parameters=self.parameters)
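# Illustrative usage sketch (not part of the original module; the DAG, date,
# task id and SQL below are assumptions for the example):
#
#     dag = DAG('example_mysql', start_date=datetime(2015, 1, 1))
#     drop_table = MySqlOperator(
#         task_id='drop_table',
#         sql='DROP TABLE IF EXISTS tmp_table',
#         mysql_conn_id='mysql_default',
#         dag=dag)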
|
NaturalGIS/QGIS | refs/heads/master | python/plugins/processing/algs/qgis/KeepNBiggestParts.py | 15 | # -*- coding: utf-8 -*-
"""
***************************************************************************
KeepNBiggestParts.py
---------------------
Date : July 2014
Copyright : (C) 2014 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'July 2014'
__copyright__ = '(C) 2014, Victor Olaya'
from operator import itemgetter
from qgis.core import (QgsGeometry,
QgsFeatureSink,
QgsProcessing,
QgsProcessingException,
QgsProcessingParameterFeatureSink,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterNumber,
)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
class KeepNBiggestParts(QgisAlgorithm):
POLYGONS = 'POLYGONS'
PARTS = 'PARTS'
OUTPUT = 'OUTPUT'
def group(self):
return self.tr('Vector geometry')
def groupId(self):
return 'vectorgeometry'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.POLYGONS,
self.tr('Polygons'), [QgsProcessing.TypeVectorPolygon]))
self.addParameter(QgsProcessingParameterNumber(self.PARTS,
self.tr('Parts to keep'),
QgsProcessingParameterNumber.Integer,
1, False, 1))
self.addParameter(
QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Parts'), QgsProcessing.TypeVectorPolygon))
def name(self):
return 'keepnbiggestparts'
def displayName(self):
return self.tr('Keep N biggest parts')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.POLYGONS, context)
if source is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.POLYGONS))
parts = self.parameterAsInt(parameters, self.PARTS, context)
fields = source.fields()
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
source.fields(), source.wkbType(), source.sourceCrs())
if sink is None:
raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))
features = source.getFeatures()
total = 100.0 / source.featureCount() if source.featureCount() else 0
for current, feat in enumerate(features):
if feedback.isCanceled():
break
geom = feat.geometry()
if geom.isMultipart():
out_feature = feat
geoms = geom.asGeometryCollection()
geom_area = [(i, geoms[i].area()) for i in range(len(geoms))]
geom_area.sort(key=itemgetter(1))
if parts == 1:
out_feature.setGeometry(geoms[geom_area[-1][0]])
elif parts > len(geoms):
out_feature.setGeometry(geom)
                else:
                    geomres = [geoms[i].asPolygon() for i, a in geom_area[-1 * parts:]]
                    out_feature.setGeometry(QgsGeometry.fromMultiPolygonXY(geomres))
sink.addFeature(out_feature, QgsFeatureSink.FastInsert)
else:
sink.addFeature(feat, QgsFeatureSink.FastInsert)
feedback.setProgress(int(current * total))
return {self.OUTPUT: dest_id}
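# Toy restatement of the sort-and-slice step above (the numbers are made up;
# not part of the original algorithm):
#
#     areas = [(0, 4.0), (1, 9.0), (2, 1.0)]   # (part index, area)
#     areas.sort(key=itemgetter(1))            # [(2, 1.0), (0, 4.0), (1, 9.0)]
#     keep = [i for i, a in areas[-2:]]        # two biggest parts -> [0, 1]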
|
ahmetdaglarbas/e-commerce | refs/heads/tez | tests/functional/basket/manipulation_tests.py | 54 | from oscar.test.testcases import WebTestCase
from oscar.test import factories
from oscar.apps.basket import models
class TestAddingToBasket(WebTestCase):
def test_works_for_standalone_product(self):
product = factories.ProductFactory()
detail_page = self.get(product.get_absolute_url())
response = detail_page.forms['add_to_basket_form'].submit()
self.assertIsRedirect(response)
baskets = models.Basket.objects.all()
self.assertEqual(1, len(baskets))
basket = baskets[0]
self.assertEqual(1, basket.num_items)
def test_works_for_child_product(self):
parent = factories.ProductFactory(structure='parent', stockrecords=[])
for x in range(3):
factories.ProductFactory(parent=parent, structure='child')
detail_page = self.get(parent.get_absolute_url())
form = detail_page.forms['add_to_basket_form']
response = form.submit()
self.assertIsRedirect(response)
baskets = models.Basket.objects.all()
self.assertEqual(1, len(baskets))
basket = baskets[0]
self.assertEqual(1, basket.num_items)
|
felipenaselva/felipe.repository | refs/heads/master | script.module.schism.common/lib/requests/packages/chardet/langthaimodel.py | 2929 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for Thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
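# Example lookups (illustrative, not part of the original tables; values read
# off the legend and rows above):
#
#     TIS620CharToOrderMap[ord('0')] == 252   # digits 0-9 map to order 252
#     TIS620CharToOrderMap[ord(',')] == 253   # punctuation maps to order 253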
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences: 7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
TIS620ThaiModel = {
'charToOrderMap': TIS620CharToOrderMap,
'precedenceMatrix': ThaiLangModel,
'mTypicalPositiveRatio': 0.926386,
'keepEnglishLetter': False,
'charsetName': "TIS-620"
}
# flake8: noqa
|
jonathonwalz/ansible | refs/heads/devel | lib/ansible/modules/windows/win_firewall.py | 27 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Michael Eaton <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a Windows documentation stub; the actual code lives in the .ps1
# file of the same name.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_firewall
version_added: '2.4'
short_description: Enable or disable the Windows Firewall
description:
- Enable or Disable Windows Firewall profiles.
options:
profiles:
description:
- Specify one or more profiles to change.
choices:
- Domain
- Private
- Public
default: [Domain, Private, Public]
state:
description:
- Set state of firewall for given profile.
choices:
- enabled
- disabled
requirements:
- This module requires Windows Management Framework 5 or later.
author: Michael Eaton (@MichaelEaton83)
'''
EXAMPLES = r'''
- name: Enable firewall for Domain, Public and Private profiles
win_firewall:
state: enabled
profiles:
- Domain
- Private
- Public
tags: enable_firewall
- name: Disable Domain firewall
win_firewall:
state: disabled
profiles:
- Domain
tags: disable_firewall
'''
RETURN = r'''
enabled:
description: current firewall status for chosen profile (after any potential change)
returned: always
type: bool
sample: true
profiles:
description: chosen profile
returned: always
type: string
sample: Domain
state:
description: desired state of the given firewall profile(s)
returned: always
type: list
sample: enabled
'''
|
dispel4py/dispel4py | refs/heads/master | dispel4py/test/simple_process_test.py | 2 | # Copyright (c) The University of Edinburgh 2014
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Tests for simple sequential processing engine.
Using nose (https://nose.readthedocs.org/en/latest/) run as follows::
$ nosetests dispel4py/test/simple_process_test.py
....
----------------------------------------------------------------------
Ran 4 tests in 0.003s
OK
'''
from dispel4py.examples.graph_testing.testing_PEs\
import TestProducer, TestOneInOneOut, TestOneInOneOutWriter, \
TestTwoInOneOut, TestIterative, IntegerProducer, PrintDataConsumer, \
RandomWordProducer, RandomFilter, WordCounter
from dispel4py.new import simple_process
from dispel4py.workflow_graph import WorkflowGraph
from dispel4py.base import create_iterative_chain, CompositePE
from nose import tools
def testPipeline():
prod = TestProducer()
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
graph = WorkflowGraph()
graph.connect(prod, 'output', cons1, 'input')
graph.connect(cons1, 'output', cons2, 'input')
results = simple_process.process_and_return(graph, inputs={prod: 5})
tools.eq_({cons2.id: {'output': list(range(1, 6))}}, results)
def testSquare():
graph = WorkflowGraph()
prod = TestProducer(2)
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
last = TestTwoInOneOut()
graph.connect(prod, 'output0', cons1, 'input')
graph.connect(prod, 'output1', cons2, 'input')
graph.connect(cons1, 'output', last, 'input0')
graph.connect(cons2, 'output', last, 'input1')
results = simple_process.process_and_return(graph, {prod: 1})
tools.eq_({last.id: {'output': ['1', '1']}}, results)
def testTee():
graph = WorkflowGraph()
prod = TestProducer()
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
graph.connect(prod, 'output', cons1, 'input')
graph.connect(prod, 'output', cons2, 'input')
results = simple_process.process_and_return(graph, {prod: 5})
tools.eq_(
{cons1.id: {'output': list(range(1, 6))},
cons2.id: {'output': list(range(1, 6))}},
results)
def testWriter():
graph = WorkflowGraph()
prod = TestProducer()
cons1 = TestOneInOneOutWriter()
graph.connect(prod, 'output', cons1, 'input')
results = simple_process.process_and_return(graph, {prod: 5})
tools.eq_({cons1.id: {'output': list(range(1, 6))}}, results)
def testIterative():
graph = WorkflowGraph()
prod = TestProducer()
cons = TestIterative()
graph.connect(prod, 'output', cons, 'input')
results = simple_process.process_and_return(graph, {prod: 25})
tools.eq_({cons.id: {'output': list(range(1, 26))}}, results)
def testProducer():
graph = WorkflowGraph()
prod = IntegerProducer(5, 234)
cons = TestIterative()
graph.connect(prod, 'output', cons, 'input')
results = simple_process.process_and_return(graph, {prod: 1})
tools.eq_({cons.id: {'output': list(range(5, 234))}}, results)
def testConsumer():
graph = WorkflowGraph()
prod = TestProducer()
cons = PrintDataConsumer()
graph.connect(prod, 'output', cons, 'input')
results = simple_process.process_and_return(graph, {prod: 10})
tools.eq_({}, results)
def testInputsAndOutputs():
graph = WorkflowGraph()
prod = TestProducer()
cons = TestOneInOneOut()
cons._add_output('output', tuple_type=['integer'])
cons._add_input('input', grouping=[0], tuple_type=['integer'])
cons.setInputTypes({'input': ['number']})
tools.eq_({'output': ['number']}, cons.getOutputTypes())
cons._add_output('output2')
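    # 'output2' was added without a tuple type, so getOutputTypes below is
    # expected to raise; the try/except tolerates either outcome (a reading
    # of the API, not stated in the original).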
try:
cons.getOutputTypes()
except Exception:
pass
graph.connect(prod, 'output', cons, 'input')
results = simple_process.process_and_return(graph, {prod: 10})
tools.eq_({cons.id: {'output': list(range(1, 11))}}, results)
def testCreateChain():
def add(a, b):
return a + b
def mult(a, b):
return a * b
def is_odd(a):
return a % 2 == 1
c = [(add, {'b': 1}), (mult, {'b': 3}), is_odd]
chain = create_iterative_chain(c)
prod = TestProducer()
graph = WorkflowGraph()
graph.connect(prod, 'output', chain, 'input')
graph.flatten()
results = simple_process.process_and_return(graph, {prod: 2})
for key, value in results.items():
tools.eq_({'output': [False, True]}, value)
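    # Worked trace of the chain for the two produced values (a reading of the
    # assertion above, not extra test logic):
    #   1 -> add 1 -> 2 -> mult 3 -> 6 -> is_odd -> False
    #   2 -> add 1 -> 3 -> mult 3 -> 9 -> is_odd -> True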
def testComposite():
comp = CompositePE()
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
comp.connect(cons1, 'output', cons2, 'input')
comp._map_input('comp_input', cons1, 'input')
comp._map_output('comp_output', cons2, 'output')
prod = TestProducer()
cons = TestOneInOneOut()
graph = WorkflowGraph()
graph.connect(prod, 'output', comp, 'comp_input')
graph.connect(comp, 'comp_output', cons, 'input')
graph.flatten()
results = simple_process.process_and_return(graph, {prod: 10})
tools.eq_({cons.id: {'output': list(range(1, 11))}}, results)
def testCompositeWithCreate():
def create_graph(graph):
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
graph.connect(cons1, 'output', cons2, 'input')
graph._map_input('comp_input', cons1, 'input')
graph._map_output('comp_output', cons2, 'output')
comp = CompositePE(create_graph)
prod = TestProducer()
cons = TestOneInOneOut()
graph = WorkflowGraph()
graph.connect(prod, 'output', comp, 'comp_input')
graph.connect(comp, 'comp_output', cons, 'input')
graph.flatten()
results = simple_process.process_and_return(graph, {prod: 10})
tools.eq_({cons.id: {'output': list(range(1, 11))}}, results)
def testCompositeWithCreateParams():
cons1 = TestOneInOneOut()
cons2 = TestOneInOneOut()
def create_graph(graph, connections):
for i in range(connections):
graph.connect(cons1, 'output', cons2, 'input')
comp = CompositePE(create_graph, {'connections': 2})
comp._map_input('comp_input', cons1, 'input')
comp._map_output('comp_output', cons2, 'output')
prod = TestProducer()
cons = TestOneInOneOut()
graph = WorkflowGraph()
graph.connect(prod, 'output', comp, 'comp_input')
graph.connect(comp, 'comp_output', cons, 'input')
graph.flatten()
results = simple_process.process_and_return(graph, {prod: 10})
expected = []
for i in range(1, 11):
expected += [i, i]
tools.eq_({cons.id: {'output': expected}}, results)
def test_process():
prod = TestProducer()
cons = PrintDataConsumer()
graph = WorkflowGraph()
graph.connect(prod, 'output', cons, 'input')
simple_process.process(graph, inputs={prod: 5})
def test_process_input_by_id():
prod = TestProducer()
cons = PrintDataConsumer()
graph = WorkflowGraph()
graph.connect(prod, 'output', cons, 'input')
simple_process.process(graph, inputs={prod.id: 5})
def testWordCount():
prod = RandomWordProducer()
filt = RandomFilter()
count = WordCounter()
graph = WorkflowGraph()
graph.connect(prod, 'output', filt, 'input')
graph.connect(filt, 'output', count, 'input')
simple_process.process(graph, inputs={prod: 100})
|
vivekmishra1991/scikit-learn | refs/heads/master | sklearn/svm/tests/__init__.py | 12133432 | |
andela-ooladayo/django | refs/heads/master | tests/defer_regress/__init__.py | 12133432 | |
lz1988/company-site | refs/heads/master | tests/regressiontests/admin_views/__init__.py | 12133432 | |
repotvsupertuga/tvsupertuga.repository | refs/heads/master | script.module.universalscrapers/lib/universalscrapers/modules/__init__.py | 12133432 | |
rtulke/ceph-deploy | refs/heads/master | ceph_deploy/pkg.py | 6 | import logging
from . import hosts
LOG = logging.getLogger(__name__)
def install(args):
packages = args.install.split(',')
for hostname in args.hosts:
distro = hosts.get(hostname, username=args.username)
LOG.info(
'Distro info: %s %s %s',
distro.name,
distro.release,
distro.codename
)
rlogger = logging.getLogger(hostname)
rlogger.info('installing packages on %s' % hostname)
distro.pkg.install(distro, packages)
distro.conn.exit()
def remove(args):
packages = args.remove.split(',')
for hostname in args.hosts:
distro = hosts.get(hostname, username=args.username)
LOG.info(
'Distro info: %s %s %s',
distro.name,
distro.release,
distro.codename
)
rlogger = logging.getLogger(hostname)
rlogger.info('removing packages from %s' % hostname)
distro.pkg.remove(distro, packages)
distro.conn.exit()
def pkg(args):
if args.install:
install(args)
elif args.remove:
remove(args)
def make(parser):
"""
Manage packages on remote hosts.
"""
parser.add_argument(
'--install',
nargs='?',
metavar='PKG(s)',
help='Comma-separated package(s) to install',
)
parser.add_argument(
'--remove',
nargs='?',
metavar='PKG(s)',
help='Comma-separated package(s) to remove',
)
parser.add_argument(
'hosts',
nargs='+',
)
parser.set_defaults(
func=pkg,
)
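# Illustrative command lines (hedged sketch: the flags mirror the parser
# above, but the host and package names are made up):
#
#     ceph-deploy pkg --install vim,htop host1 host2
#     ceph-deploy pkg --remove vim host1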
|
nikhilraog/boto | refs/heads/develop | boto/mashups/interactive.py | 148 | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from __future__ import print_function
import socket
import sys
# windows does not have termios...
try:
import termios
import tty
has_termios = True
except ImportError:
has_termios = False
def interactive_shell(chan):
if has_termios:
posix_shell(chan)
else:
windows_shell(chan)
def posix_shell(chan):
import select
oldtty = termios.tcgetattr(sys.stdin)
try:
tty.setraw(sys.stdin.fileno())
tty.setcbreak(sys.stdin.fileno())
chan.settimeout(0.0)
while True:
r, w, e = select.select([chan, sys.stdin], [], [])
if chan in r:
try:
x = chan.recv(1024)
if len(x) == 0:
print('\r\n*** EOF\r\n', end=' ')
break
sys.stdout.write(x)
sys.stdout.flush()
except socket.timeout:
pass
if sys.stdin in r:
x = sys.stdin.read(1)
if len(x) == 0:
break
chan.send(x)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)
# thanks to Mike Looijmans for this code
def windows_shell(chan):
import threading
sys.stdout.write("Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n")
def writeall(sock):
while True:
data = sock.recv(256)
if not data:
sys.stdout.write('\r\n*** EOF ***\r\n\r\n')
sys.stdout.flush()
break
sys.stdout.write(data)
sys.stdout.flush()
writer = threading.Thread(target=writeall, args=(chan,))
writer.start()
try:
while True:
d = sys.stdin.read(1)
if not d:
break
chan.send(d)
except EOFError:
# user hit ^Z or F6
pass
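# Illustrative usage sketch (not part of the original module; host and
# credentials are assumptions for the example):
#
#     import paramiko
#     client = paramiko.SSHClient()
#     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#     client.connect('example.com', username='user', password='secret')
#     interactive_shell(client.invoke_shell())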
|
lathertonj/RemixNoveltyRanker | refs/heads/master | Code/event_histogram.py | 1 | from subprocess import check_output
import numpy as np
import matplotlib.pyplot as plt
def bpms():
f = open('bpms.txt')
bpms = {}
line = f.readline()
while line != "" and line != "\n":
k, v = line.strip().split(',')
bpms[k] = float(v)
line = f.readline()
f.close()
return bpms
bpms = bpms()
def get_event_histogram(release):
release = str(release)
events = check_output(["aubioonset", "-i", "../anjuna_symlinks/"+release, "-O", "kl"])
events = [float(x) for x in events.split()]
# We want to scan for 16ths: units of window are seconds per 16th note
window = 15.0 / bpms[release]
# The first event should be exactly halfway through its window.
x0 = events[0]
events = [x - x0 + (window/2.0) for x in events]
histogram = np.array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])
    total = 0
    for e in events:
        # Wrap the event time into a single measure
        scaled_e = e % (16.0 * window)
        # Find which 16th-note bin this falls in
        bin_e = int(scaled_e / window)
        histogram[bin_e] += 1
        total += 1
    # Normalise so the histogram sums to 1 ('total' avoids shadowing the
    # built-in sum())
    return histogram * 1.0 / total
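# Worked example of the binning above (illustrative numbers, not from any
# real track): at 120 BPM a 16th note lasts 15.0 / 120 = 0.125 s, one measure
# spans 16 * 0.125 = 2.0 s, and an onset 0.30 s into the measure lands in bin
# int(0.30 / 0.125) == 2.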
def plot_histogram(h):
b = plt.bar(np.array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]), h, orientation='vertical')
plt.show()
def ph(r):
plot_histogram(get_event_histogram(r))
def find_errors():
for k in bpms:
try:
h = get_event_histogram(k)
except:
print k + " has formatting issues." |
uclouvain/OSIS-Louvain | refs/heads/master | base/migrations/0025_organization_type.py | 3 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-22 15:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0024_documentfile'),
]
operations = [
migrations.AddField(
model_name='organization',
name='type',
field=models.CharField(blank=True, choices=[('MAIN', 'Main'), ('ACADEMIC_PARTNER', 'Academic partner'), ('INDUSTRIAL_PARTNER', 'Industrial partner'), ('SERVICE_PARTNER', 'Service partner'), ('COMMERCE_PARTNER', 'Commerce partner'), ('PUBLIC_PARTNER', 'Public partner')], default='UNKNOWN', max_length=30, null=True),
),
]
|
GitHublong/hue | refs/heads/master | desktop/core/ext-py/cx_Oracle-5.1.2/test/uNumberVar.py | 34 | """Module for testing number variables."""
import cx_Oracle
import decimal
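# Note: BaseTestCase below is not imported here; the cx_Oracle test driver
# appears to execute these modules in a namespace where it is already
# defined (an assumption about the harness, not stated in this file).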
class TestNumberVar(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.rawData = []
self.dataByKey = {}
for i in range(1, 11):
numberCol = i + i * 0.25
floatCol = i + i * 0.75
unconstrainedCol = i ** 3 + i * 0.5
if i % 2:
nullableCol = 143L ** i
else:
nullableCol = None
dataTuple = (i, numberCol, floatCol, unconstrainedCol, nullableCol)
self.rawData.append(dataTuple)
self.dataByKey[i] = dataTuple
def testBindDecimal(self):
"test binding in a decimal.Decimal"
self.cursor.execute(u"""
select * from TestNumbers
where NumberCol - :value1 - :value2 = trunc(NumberCol)""",
value1 = decimal.Decimal("0.20"),
value2 = decimal.Decimal("0.05"))
self.failUnlessEqual(self.cursor.fetchall(),
[self.dataByKey[1], self.dataByKey[5], self.dataByKey[9]])
def testBindFloat(self):
"test binding in a float"
self.cursor.execute(u"""
select * from TestNumbers
where NumberCol - :value = trunc(NumberCol)""",
value = 0.25)
self.failUnlessEqual(self.cursor.fetchall(),
[self.dataByKey[1], self.dataByKey[5], self.dataByKey[9]])
def testBindInteger(self):
"test binding in an integer"
self.cursor.execute(u"""
select * from TestNumbers
where IntCol = :value""",
value = 2)
self.failUnlessEqual(self.cursor.fetchall(), [self.dataByKey[2]])
def testBindSmallLong(self):
"test binding in a small long integer"
self.cursor.execute(u"""
select * from TestNumbers
where IntCol = :value""",
value = 3L)
self.failUnlessEqual(self.cursor.fetchall(), [self.dataByKey[3]])
def testBindLargeLong(self):
"test binding in a large long integer"
valueVar = self.cursor.var(cx_Oracle.NUMBER)
valueVar.setvalue(0, 6088343244)
self.cursor.execute(u"""
begin
:value := :value + 5;
end;""",
value = valueVar)
value = valueVar.getvalue()
self.failUnlessEqual(value, 6088343249)
def testBindIntegerAfterString(self):
"test binding in an number after setting input sizes to a string"
self.cursor.setinputsizes(value = 15)
self.cursor.execute(u"""
select * from TestNumbers
where IntCol = :value""",
value = 3)
self.failUnlessEqual(self.cursor.fetchall(), [self.dataByKey[3]])
def testBindNull(self):
"test binding in a null"
self.cursor.execute(u"""
select * from TestNumbers
where IntCol = :value""",
value = None)
self.failUnlessEqual(self.cursor.fetchall(), [])
def testBindNumberArrayDirect(self):
"test binding in a number array"
returnValue = self.cursor.var(cx_Oracle.NUMBER)
array = [r[1] for r in self.rawData]
statement = u"""
begin
:p_ReturnValue := pkg_TestNumberArrays.TestInArrays(
:p_StartValue, :p_Array);
end;"""
self.cursor.execute(statement,
p_ReturnValue = returnValue,
p_StartValue = 5,
p_Array = array)
self.failUnlessEqual(returnValue.getvalue(), 73.75)
array = range(15)
self.cursor.execute(statement,
p_StartValue = 10,
p_Array = array)
self.failUnlessEqual(returnValue.getvalue(), 115.0)
def testBindNumberArrayBySizes(self):
"test binding in a number array (with setinputsizes)"
returnValue = self.cursor.var(cx_Oracle.NUMBER)
self.cursor.setinputsizes(p_Array = [cx_Oracle.NUMBER, 10])
array = [r[1] for r in self.rawData]
self.cursor.execute(u"""
begin
:p_ReturnValue := pkg_TestNumberArrays.TestInArrays(
:p_StartValue, :p_Array);
end;""",
p_ReturnValue = returnValue,
p_StartValue = 6,
p_Array = array)
self.failUnlessEqual(returnValue.getvalue(), 74.75)
def testBindNumberArrayByVar(self):
"test binding in a number array (with arrayvar)"
returnValue = self.cursor.var(cx_Oracle.NUMBER)
array = self.cursor.arrayvar(cx_Oracle.NUMBER,
[r[1] for r in self.rawData])
array.setvalue(0, [r[1] for r in self.rawData])
self.cursor.execute(u"""
begin
:p_ReturnValue := pkg_TestNumberArrays.TestInArrays(
:p_IntegerValue, :p_Array);
end;""",
p_ReturnValue = returnValue,
p_IntegerValue = 7,
p_Array = array)
self.failUnlessEqual(returnValue.getvalue(), 75.75)
def testBindZeroLengthNumberArrayByVar(self):
"test binding in a zero length number array (with arrayvar)"
returnValue = self.cursor.var(cx_Oracle.NUMBER)
array = self.cursor.arrayvar(cx_Oracle.NUMBER, 0)
self.cursor.execute(u"""
begin
:p_ReturnValue := pkg_TestNumberArrays.TestInArrays(
:p_IntegerValue, :p_Array);
end;""",
p_ReturnValue = returnValue,
p_IntegerValue = 8,
p_Array = array)
self.failUnlessEqual(returnValue.getvalue(), 8.0)
self.failUnlessEqual(array.getvalue(), [])
def testBindInOutNumberArrayByVar(self):
"test binding in/out a number array (with arrayvar)"
array = self.cursor.arrayvar(cx_Oracle.NUMBER, 10)
originalData = [r[1] for r in self.rawData]
expectedData = [originalData[i - 1] * 10 for i in range(1, 6)] + \
originalData[5:]
array.setvalue(0, originalData)
self.cursor.execute(u"""
begin
pkg_TestNumberArrays.TestInOutArrays(:p_NumElems, :p_Array);
end;""",
p_NumElems = 5,
p_Array = array)
self.failUnlessEqual(array.getvalue(), expectedData)
def testBindOutNumberArrayByVar(self):
"test binding out a Number array (with arrayvar)"
array = self.cursor.arrayvar(cx_Oracle.NUMBER, 6)
expectedData = [i * 100 for i in range(1, 7)]
self.cursor.execute(u"""
begin
pkg_TestNumberArrays.TestOutArrays(:p_NumElems, :p_Array);
end;""",
p_NumElems = 6,
p_Array = array)
self.failUnlessEqual(array.getvalue(), expectedData)
def testBindOutSetInputSizes(self):
"test binding out with set input sizes defined"
vars = self.cursor.setinputsizes(value = cx_Oracle.NUMBER)
self.cursor.execute(u"""
begin
:value := 5;
end;""")
self.failUnlessEqual(vars["value"].getvalue(), 5)
def testBindInOutSetInputSizes(self):
"test binding in/out with set input sizes defined"
vars = self.cursor.setinputsizes(value = cx_Oracle.NUMBER)
self.cursor.execute(u"""
begin
:value := :value + 5;
end;""",
value = 1.25)
self.failUnlessEqual(vars["value"].getvalue(), 6.25)
def testBindOutVar(self):
"test binding out with cursor.var() method"
var = self.cursor.var(cx_Oracle.NUMBER)
self.cursor.execute(u"""
begin
:value := 5;
end;""",
value = var)
self.failUnlessEqual(var.getvalue(), 5)
def testBindInOutVarDirectSet(self):
"test binding in/out with cursor.var() method"
var = self.cursor.var(cx_Oracle.NUMBER)
var.setvalue(0, 2.25)
self.cursor.execute(u"""
begin
:value := :value + 5;
end;""",
value = var)
self.failUnlessEqual(var.getvalue(), 7.25)
def testCursorDescription(self):
"test cursor description is accurate"
self.cursor.execute(u"select * from TestNumbers")
self.failUnlessEqual(self.cursor.description,
[ (u'INTCOL', cx_Oracle.NUMBER, 10, 22, 9, 0, 0),
(u'NUMBERCOL', cx_Oracle.NUMBER, 13, 22, 9, 2, 0),
(u'FLOATCOL', cx_Oracle.NUMBER, 127, 22, 126, -127, 0),
(u'UNCONSTRAINEDCOL', cx_Oracle.NUMBER, 127, 22, 0, -127, 0),
(u'NULLABLECOL', cx_Oracle.NUMBER, 39, 22, 38, 0, 1) ])
def testFetchAll(self):
"test that fetching all of the data returns the correct results"
self.cursor.execute(u"select * From TestNumbers order by IntCol")
self.failUnlessEqual(self.cursor.fetchall(), self.rawData)
self.failUnlessEqual(self.cursor.fetchall(), [])
def testFetchMany(self):
"test that fetching data in chunks returns the correct results"
self.cursor.execute(u"select * From TestNumbers order by IntCol")
self.failUnlessEqual(self.cursor.fetchmany(3), self.rawData[0:3])
self.failUnlessEqual(self.cursor.fetchmany(2), self.rawData[3:5])
self.failUnlessEqual(self.cursor.fetchmany(4), self.rawData[5:9])
self.failUnlessEqual(self.cursor.fetchmany(3), self.rawData[9:])
self.failUnlessEqual(self.cursor.fetchmany(3), [])
def testFetchOne(self):
"test that fetching a single row returns the correct results"
self.cursor.execute(u"""
select *
from TestNumbers
where IntCol in (3, 4)
order by IntCol""")
self.failUnlessEqual(self.cursor.fetchone(), self.dataByKey[3])
self.failUnlessEqual(self.cursor.fetchone(), self.dataByKey[4])
self.failUnlessEqual(self.cursor.fetchone(), None)
def testReturnAsLong(self):
"test that fetching a long integer returns such in Python"
self.cursor.execute(u"""
select NullableCol
from TestNumbers
where IntCol = 9""")
col, = self.cursor.fetchone()
self.failUnless(isinstance(col, long), "long integer not returned")
def testReturnAsFloat(self):
"test that fetching a floating point number returns such in Python"
self.cursor.execute(u"select 1.25 from dual")
result, = self.cursor.fetchone()
self.failUnlessEqual(result, 1.25)
|
darcyliu/storyboard | refs/heads/master | markdown/extensions/def_list.py | 31 | #!/usr/bin/env python
"""
Definition List Extension for Python-Markdown
=============================================
Added parsing of Definition Lists to Python-Markdown.
A simple example:
Apple
: Pomaceous fruit of plants of the genus Malus in
the family Rosaceae.
: An american computer company.
Orange
: The fruit of an evergreen tree of the genus Citrus.
Copyright 2008 - [Waylan Limberg](http://achinghead.com)
"""
import re
import markdown
from markdown.util import etree
class DefListProcessor(markdown.blockprocessors.BlockProcessor):
""" Process Definition Lists. """
RE = re.compile(r'(^|\n)[ ]{0,3}:[ ]{1,3}(.*?)(\n|$)')
NO_INDENT_RE = re.compile(r'^[ ]{0,3}[^ :]')
def test(self, parent, block):
return bool(self.RE.search(block))
def run(self, parent, blocks):
block = blocks.pop(0)
m = self.RE.search(block)
terms = [l.strip() for l in block[:m.start()].split('\n') if l.strip()]
block = block[m.end():]
no_indent = self.NO_INDENT_RE.match(block)
if no_indent:
d, theRest = (block, None)
else:
d, theRest = self.detab(block)
if d:
d = '%s\n%s' % (m.group(2), d)
else:
d = m.group(2)
sibling = self.lastChild(parent)
        if not terms and sibling is not None and sibling.tag == 'p':
# The previous paragraph contains the terms
state = 'looselist'
terms = sibling.text.split('\n')
parent.remove(sibling)
# Aquire new sibling
sibling = self.lastChild(parent)
else:
state = 'list'
if sibling and sibling.tag == 'dl':
# This is another item on an existing list
dl = sibling
if len(dl) and dl[-1].tag == 'dd' and len(dl[-1]):
state = 'looselist'
else:
# This is a new list
dl = etree.SubElement(parent, 'dl')
# Add terms
for term in terms:
dt = etree.SubElement(dl, 'dt')
dt.text = term
# Add definition
self.parser.state.set(state)
dd = etree.SubElement(dl, 'dd')
self.parser.parseBlocks(dd, [d])
self.parser.state.reset()
if theRest:
blocks.insert(0, theRest)
class DefListIndentProcessor(markdown.blockprocessors.ListIndentProcessor):
""" Process indented children of definition list items. """
ITEM_TYPES = ['dd']
LIST_TYPES = ['dl']
def create_item(self, parent, block):
""" Create a new dd and parse the block with it as the parent. """
        dd = etree.SubElement(parent, 'dd')
self.parser.parseBlocks(dd, [block])
class DefListExtension(markdown.Extension):
""" Add definition lists to Markdown. """
def extendMarkdown(self, md, md_globals):
""" Add an instance of DefListProcessor to BlockParser. """
md.parser.blockprocessors.add('defindent',
DefListIndentProcessor(md.parser),
'>indent')
md.parser.blockprocessors.add('deflist',
DefListProcessor(md.parser),
'>ulist')
def makeExtension(configs={}):
return DefListExtension(configs=configs)
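# Illustrative usage sketch (not part of the original extension; the input
# mirrors the module docstring example):
#
#     import markdown
#     html = markdown.markdown("Apple\n:   Pomaceous fruit.",
#                              extensions=['def_list'])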
|
shyamalschandra/scikit-learn | refs/heads/master | examples/linear_model/plot_logistic_multinomial.py | 24 | """
====================================================
Plot multinomial and One-vs-Rest Logistic Regression
====================================================
Plot decision surface of multinomial and One-vs-Rest Logistic Regression.
The hyperplanes corresponding to the three One-vs-Rest (OVR) classifiers
are represented by the dashed lines.
"""
print(__doc__)
# Authors: Tom Dupre la Tour <[email protected]>
# Licence: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.linear_model import LogisticRegression
# make 3-class dataset for classification
centers = [[-5, 0], [0, 1.5], [5, -1]]
X, y = make_blobs(n_samples=1000, centers=centers, random_state=40)
transformation = [[0.4, 0.2], [-0.4, 1.2]]
X = np.dot(X, transformation)
for multi_class in ('multinomial', 'ovr'):
clf = LogisticRegression(solver='sag', max_iter=100, random_state=42,
multi_class=multi_class).fit(X, y)
# print the training scores
print("training score : %.3f (%s)" % (clf.score(X, y), multi_class))
# create a mesh to plot in
h = .02 # step size in the mesh
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
# Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max] x [y_min, y_max].
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
plt.title("Decision surface of LogisticRegression (%s)" % multi_class)
plt.axis('tight')
# Plot also the training points
colors = "bry"
for i, color in zip(clf.classes_, colors):
idx = np.where(y == i)
plt.scatter(X[idx, 0], X[idx, 1], c=color, cmap=plt.cm.Paired)
# Plot the three one-against-all classifiers
xmin, xmax = plt.xlim()
ymin, ymax = plt.ylim()
coef = clf.coef_
intercept = clf.intercept_
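    # The OVR boundary for class c satisfies
    # coef[c, 0] * x0 + coef[c, 1] * x1 + intercept[c] = 0, i.e.
    # x1 = -(coef[c, 0] * x0 + intercept[c]) / coef[c, 1], which is what
    # line() below computes.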
def plot_hyperplane(c, color):
def line(x0):
return (-(x0 * coef[c, 0]) - intercept[c]) / coef[c, 1]
plt.plot([xmin, xmax], [line(xmin), line(xmax)],
ls="--", color=color)
for i, color in zip(clf.classes_, colors):
plot_hyperplane(i, color)
plt.show()
|
gregdek/ansible | refs/heads/devel | lib/ansible/modules/utilities/logic/include_vars.py | 26 | # -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'
}
DOCUMENTATION = '''
---
author: Allen Sanabria (@linuxdynasty)
module: include_vars
short_description: Load variables from files, dynamically within a task
description:
- Loads YAML/JSON variables dynamically from a file or directory, recursively, during task runtime.
- If loading a directory, the files are sorted alphabetically before being loaded.
- This module is also supported for Windows targets.
- To assign included variables to a different host than C(inventory_hostname),
use C(delegate_to) and set L(delegate_facts=True,../user_guide/playbooks_delegate.html#delegated-facts).
version_added: "1.4"
options:
file:
version_added: "2.2"
description:
- The file name from which variables should be loaded.
- If the path is relative, it will look for the file in vars/ subdirectory of a role or relative to playbook.
dir:
version_added: "2.2"
description:
- The directory name from which the variables should be loaded.
- If the path is relative, it will look for the file in vars/ subdirectory of a role or relative to playbook.
name:
version_added: "2.2"
description:
      - The name of a variable into which to assign the included vars. If omitted (null) they will be made top level vars.
depth:
version_added: "2.2"
description:
- When using C(dir), this module will, by default, recursively go through each sub directory and load up the
variables. By explicitly setting the depth, this module will only go as deep as the depth.
default: 0
files_matching:
version_added: "2.2"
description:
- Limit the files that are loaded within any directory to this regular expression.
ignore_files:
version_added: "2.2"
description:
- List of file names to ignore.
extensions:
version_added: "2.3"
description:
- List of file extensions to read when using C(dir).
default: [yaml, yml, json]
ignore_unknown_extensions:
version_added: "2.7"
description:
- Ignore unknown file extensions within the directory. This allows users to specify a directory containing vars files
        that are intermingled with files of other extension types (for example, a directory with a README in it and vars files).
default: False
free-form:
description:
- This module allows you to specify the 'file' option directly without any other options.
There is no 'free-form' option, this is just an indicator, see example below.
notes:
- This module is also supported for Windows targets.
'''
EXAMPLES = """
- name: Include vars of stuff.yaml into the 'stuff' variable (2.2).
include_vars:
file: stuff.yaml
name: stuff
- name: Conditionally load variables into 'plans' when x is 0, otherwise do not. (2.2)
include_vars:
file: contingency_plan.yaml
name: plans
when: x == 0
- name: Load a variable file based on the OS type, or a default if not found. Using free-form to specify the file.
include_vars: "{{ lookup('first_found', possible_files) }}"
vars:
possible_files:
- "{{ ansible_distribution }}.yaml"
- "{{ ansible_os_family }}.yaml"
- default.yaml
- name: Bare include (free-form)
include_vars: myvars.yaml
- name: Include all .json and .jsn files in vars/all and all nested directories (2.3)
include_vars:
dir: vars/all
extensions:
- json
- jsn
- name: Include all default extension files in vars/all and all nested directories and save the output in test. (2.2)
include_vars:
dir: vars/all
name: test
- name: Include default extension files in vars/services (2.2)
include_vars:
dir: vars/services
depth: 1
- name: Include only files matching bastion.yaml (2.2)
include_vars:
dir: vars
files_matching: bastion.yaml
- name: Include all .yaml files except bastion.yaml (2.3)
include_vars:
dir: vars
ignore_files: [bastion.yaml]
extensions: [yaml]
"""
RETURN = '''
ansible_facts:
description: Variables that were included and their values
returned: success
type: dict
sample: {'variable': 'value'}
ansible_included_var_files:
description: A list of files that were successfully included
returned: success
type: list
sample: [ '/path/to/file.yaml', '/path/to/file.json' ]
version_added: 2.4
'''
|
priyesh16/thesis | refs/heads/252B_start | src/network/test/examples-to-run.py | 129 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("main-packet-header", "True", "True"),
("main-packet-tag", "True", "True"),
("red-tests", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
shaufi10/odoo | refs/heads/8.0 | addons/crm/res_config.py | 361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-TODAY OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
class crm_configuration(osv.TransientModel):
_name = 'sale.config.settings'
_inherit = ['sale.config.settings', 'fetchmail.config.settings']
_columns = {
'group_fund_raising': fields.boolean("Manage Fund Raising",
implied_group='crm.group_fund_raising',
help="""Allows you to trace and manage your activities for fund raising."""),
'module_crm_claim': fields.boolean("Manage Customer Claims",
help='Allows you to track your customers/suppliers claims and grievances.\n'
'-This installs the module crm_claim.'),
'module_crm_helpdesk': fields.boolean("Manage Helpdesk and Support",
help='Allows you to communicate with Customer, process Customer query, and provide better help and support.\n'
'-This installs the module crm_helpdesk.'),
'alias_prefix': fields.char('Default Alias Name for Leads'),
'alias_domain' : fields.char('Alias Domain'),
'group_scheduled_calls': fields.boolean("Schedule calls to manage call center",
implied_group='crm.group_scheduled_calls',
help="""This adds the menu 'Scheduled Calls' under 'Sales / Phone Calls'""")
}
_defaults = {
'alias_domain': lambda self, cr, uid, context: self.pool['mail.alias']._get_alias_domain(cr, SUPERUSER_ID, [1], None, None)[1],
}
def _find_default_lead_alias_id(self, cr, uid, context=None):
alias_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'crm.mail_alias_lead_info')
if not alias_id:
alias_ids = self.pool['mail.alias'].search(
cr, uid, [
('alias_model_id.model', '=', 'crm.lead'),
('alias_force_thread_id', '=', False),
('alias_parent_model_id.model', '=', 'crm.case.section'),
('alias_parent_thread_id', '=', False),
('alias_defaults', '=', '{}')
], context=context)
alias_id = alias_ids and alias_ids[0] or False
return alias_id
def get_default_alias_prefix(self, cr, uid, ids, context=None):
alias_name = False
alias_id = self._find_default_lead_alias_id(cr, uid, context=context)
if alias_id:
alias_name = self.pool['mail.alias'].browse(cr, uid, alias_id, context=context).alias_name
return {'alias_prefix': alias_name}
def set_default_alias_prefix(self, cr, uid, ids, context=None):
mail_alias = self.pool['mail.alias']
for record in self.browse(cr, uid, ids, context=context):
alias_id = self._find_default_lead_alias_id(cr, uid, context=context)
if not alias_id:
create_ctx = dict(context, alias_model_name='crm.lead', alias_parent_model_name='crm.case.section')
alias_id = self.pool['mail.alias'].create(cr, uid, {'alias_name': record.alias_prefix}, context=create_ctx)
else:
mail_alias.write(cr, uid, alias_id, {'alias_name': record.alias_prefix}, context=context)
return True
|
kaulkie/keyczar | refs/heads/master | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/compat/_scons_optparse.py | 19 | """optparse - a powerful, extensible, and easy-to-use option parser.
By Greg Ward <[email protected]>
Originally distributed as Optik; see http://optik.sourceforge.net/ .
If you have problems with this module, please do not file bugs,
patches, or feature requests with Python; instead, use Optik's
SourceForge project page:
http://sourceforge.net/projects/optik
For support, use the [email protected] mailing list
(http://lists.sourceforge.net/lists/listinfo/optik-users).
"""
# Python developers: please do not make changes to this file, since
# it is automatically generated from the Optik source code.
__version__ = "1.5.3"
__all__ = ['Option',
'SUPPRESS_HELP',
'SUPPRESS_USAGE',
'Values',
'OptionContainer',
'OptionGroup',
'OptionParser',
'HelpFormatter',
'IndentedHelpFormatter',
'TitledHelpFormatter',
'OptParseError',
'OptionError',
'OptionConflictError',
'OptionValueError',
'BadOptionError']
__copyright__ = """
Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved.
Copyright (c) 2002-2006 Python Software Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import string
import sys, os
import types
import textwrap
def _repr(self):
return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)
try:
sys.getdefaultencoding
except AttributeError:
def fake_getdefaultencoding():
return None
sys.getdefaultencoding = fake_getdefaultencoding
try:
''.encode
except AttributeError:
def encode_wrapper(s, encoding, replacement):
return s
else:
def encode_wrapper(s, encoding, replacement):
return s.encode(encoding, replacement)
# This file was generated from:
# Id: option_parser.py 527 2006-07-23 15:21:30Z greg
# Id: option.py 522 2006-06-11 16:22:03Z gward
# Id: help.py 527 2006-07-23 15:21:30Z greg
# Id: errors.py 509 2006-04-20 00:58:24Z gward
try:
from gettext import gettext
except ImportError:
def gettext(message):
return message
_ = gettext
class OptParseError (Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class OptionError (OptParseError):
"""
Raised if an Option instance is created with invalid or
inconsistent arguments.
"""
def __init__(self, msg, option):
self.msg = msg
self.option_id = str(option)
def __str__(self):
if self.option_id:
return "option %s: %s" % (self.option_id, self.msg)
else:
return self.msg
class OptionConflictError (OptionError):
"""
Raised if conflicting options are added to an OptionParser.
"""
class OptionValueError (OptParseError):
"""
Raised if an invalid option value is encountered on the command
line.
"""
class BadOptionError (OptParseError):
"""
Raised if an invalid option is seen on the command line.
"""
def __init__(self, opt_str):
self.opt_str = opt_str
def __str__(self):
return _("no such option: %s") % self.opt_str
class AmbiguousOptionError (BadOptionError):
"""
Raised if an ambiguous option is seen on the command line.
"""
def __init__(self, opt_str, possibilities):
BadOptionError.__init__(self, opt_str)
self.possibilities = possibilities
def __str__(self):
return (_("ambiguous option: %s (%s?)")
% (self.opt_str, string.join(self.possibilities, ", ")))
class HelpFormatter:
"""
Abstract base class for formatting option help. OptionParser
instances should use one of the HelpFormatter subclasses for
formatting help; by default IndentedHelpFormatter is used.
Instance attributes:
parser : OptionParser
the controlling OptionParser instance
indent_increment : int
the number of columns to indent per nesting level
max_help_position : int
the maximum starting column for option help text
help_position : int
the calculated starting column for option help text;
initially the same as the maximum
width : int
total number of columns for output (pass None to constructor for
this value to be taken from the $COLUMNS environment variable)
level : int
current indentation level
current_indent : int
current indentation level (in columns)
help_width : int
number of columns available for option help text (calculated)
default_tag : str
text to replace with each option's default value, "%default"
by default. Set to false value to disable default value expansion.
option_strings : { Option : str }
maps Option instances to the snippet of help text explaining
the syntax of that option, e.g. "-h, --help" or
"-fFILE, --file=FILE"
_short_opt_fmt : str
format string controlling how short options with values are
printed in help text. Must be either "%s%s" ("-fFILE") or
"%s %s" ("-f FILE"), because those are the two syntaxes that
Optik supports.
_long_opt_fmt : str
similar but for long options; must be either "%s %s" ("--file FILE")
or "%s=%s" ("--file=FILE").
"""
NO_DEFAULT_VALUE = "none"
def __init__(self,
indent_increment,
max_help_position,
width,
short_first):
self.parser = None
self.indent_increment = indent_increment
self.help_position = self.max_help_position = max_help_position
if width is None:
try:
width = int(os.environ['COLUMNS'])
except (KeyError, ValueError):
width = 80
width = width - 2
self.width = width
self.current_indent = 0
self.level = 0
self.help_width = None # computed later
self.short_first = short_first
self.default_tag = "%default"
self.option_strings = {}
self._short_opt_fmt = "%s %s"
self._long_opt_fmt = "%s=%s"
def set_parser(self, parser):
self.parser = parser
def set_short_opt_delimiter(self, delim):
if delim not in ("", " "):
raise ValueError(
"invalid metavar delimiter for short options: %r" % delim)
self._short_opt_fmt = "%s" + delim + "%s"
def set_long_opt_delimiter(self, delim):
if delim not in ("=", " "):
raise ValueError(
"invalid metavar delimiter for long options: %r" % delim)
self._long_opt_fmt = "%s" + delim + "%s"
def indent(self):
self.current_indent = self.current_indent + self.indent_increment
self.level = self.level + 1
def dedent(self):
self.current_indent = self.current_indent - self.indent_increment
assert self.current_indent >= 0, "Indent decreased below 0."
self.level = self.level - 1
def format_usage(self, usage):
raise NotImplementedError, "subclasses must implement"
def format_heading(self, heading):
raise NotImplementedError, "subclasses must implement"
def _format_text(self, text):
"""
Format a paragraph of free-form text for inclusion in the
help output at the current indentation level.
"""
text_width = self.width - self.current_indent
indent = " "*self.current_indent
return textwrap.fill(text,
text_width,
initial_indent=indent,
subsequent_indent=indent)
def format_description(self, description):
if description:
return self._format_text(description) + "\n"
else:
return ""
def format_epilog(self, epilog):
if epilog:
return "\n" + self._format_text(epilog) + "\n"
else:
return ""
def expand_default(self, option):
if self.parser is None or not self.default_tag:
return option.help
default_value = self.parser.defaults.get(option.dest)
if default_value is NO_DEFAULT or default_value is None:
default_value = self.NO_DEFAULT_VALUE
return string.replace(option.help, self.default_tag, str(default_value))
def format_option(self, option):
# The help for each option consists of two parts:
# * the opt strings and metavars
# eg. ("-x", or "-fFILENAME, --file=FILENAME")
# * the user-supplied help string
# eg. ("turn on expert mode", "read data from FILENAME")
#
# If possible, we write both of these on the same line:
# -x turn on expert mode
#
# But if the opt string list is too long, we put the help
# string on a second line, indented to the same column it would
# start in if it fit on the first line.
# -fFILENAME, --file=FILENAME
# read data from FILENAME
result = []
opts = self.option_strings[option]
opt_width = self.help_position - self.current_indent - 2
if len(opts) > opt_width:
opts = "%*s%s\n" % (self.current_indent, "", opts)
indent_first = self.help_position
else: # start help on same line as opts
opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts)
indent_first = 0
result.append(opts)
if option.help:
help_text = self.expand_default(option)
help_lines = textwrap.wrap(help_text, self.help_width)
result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
for line in help_lines[1:]:
result.append("%*s%s\n" % (self.help_position, "", line))
elif opts[-1] != "\n":
result.append("\n")
return string.join(result, "")
def store_option_strings(self, parser):
self.indent()
max_len = 0
for opt in parser.option_list:
strings = self.format_option_strings(opt)
self.option_strings[opt] = strings
max_len = max(max_len, len(strings) + self.current_indent)
self.indent()
for group in parser.option_groups:
for opt in group.option_list:
strings = self.format_option_strings(opt)
self.option_strings[opt] = strings
max_len = max(max_len, len(strings) + self.current_indent)
self.dedent()
self.dedent()
self.help_position = min(max_len + 2, self.max_help_position)
self.help_width = self.width - self.help_position
def format_option_strings(self, option):
"""Return a comma-separated list of option strings & metavariables."""
if option.takes_value():
metavar = option.metavar or string.upper(option.dest)
short_opts = []
for sopt in option._short_opts:
short_opts.append(self._short_opt_fmt % (sopt, metavar))
long_opts = []
for lopt in option._long_opts:
long_opts.append(self._long_opt_fmt % (lopt, metavar))
else:
short_opts = option._short_opts
long_opts = option._long_opts
if self.short_first:
opts = short_opts + long_opts
else:
opts = long_opts + short_opts
return string.join(opts, ", ")
class IndentedHelpFormatter (HelpFormatter):
"""Format help with indented section bodies.
"""
def __init__(self,
indent_increment=2,
max_help_position=24,
width=None,
short_first=1):
HelpFormatter.__init__(
self, indent_increment, max_help_position, width, short_first)
def format_usage(self, usage):
return _("Usage: %s\n") % usage
def format_heading(self, heading):
return "%*s%s:\n" % (self.current_indent, "", heading)
class TitledHelpFormatter (HelpFormatter):
"""Format help with underlined section headers.
"""
def __init__(self,
indent_increment=0,
max_help_position=24,
width=None,
short_first=0):
HelpFormatter.__init__ (
self, indent_increment, max_help_position, width, short_first)
def format_usage(self, usage):
return "%s %s\n" % (self.format_heading(_("Usage")), usage)
def format_heading(self, heading):
return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading))
def _parse_num(val, type):
if string.lower(val[:2]) == "0x": # hexadecimal
radix = 16
elif string.lower(val[:2]) == "0b": # binary
radix = 2
val = val[2:] or "0" # have to remove "0b" prefix
elif val[:1] == "0": # octal
radix = 8
else: # decimal
radix = 10
return type(val, radix)
def _parse_int(val):
return _parse_num(val, int)
def _parse_long(val):
return _parse_num(val, long)
try:
int('0', 10)
except TypeError:
# Python 1.5.2 doesn't allow a radix value to be passed to int().
_parse_int = int
try:
long('0', 10)
except TypeError:
# Python 1.5.2 doesn't allow a radix value to be passed to long().
_parse_long = long
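# Worked example (illustrative): _parse_num() picks a radix from the literal
# prefix before delegating to int()/long():
#     _parse_int("0x1f")  -> 31   (hexadecimal; int() accepts the "0x" prefix)
#     _parse_int("0b101") -> 5    (binary; the "0b" prefix is stripped first)
#     _parse_int("017")   -> 15   (a leading zero means octal)
#     _parse_int("17")    -> 17   (decimal)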
_builtin_cvt = { "int" : (_parse_int, _("integer")),
"long" : (_parse_long, _("long integer")),
"float" : (float, _("floating-point")),
"complex" : (complex, _("complex")) }
def check_builtin(option, opt, value):
(cvt, what) = _builtin_cvt[option.type]
try:
return cvt(value)
except ValueError:
raise OptionValueError(
_("option %s: invalid %s value: %r") % (opt, what, value))
def check_choice(option, opt, value):
if value in option.choices:
return value
else:
choices = string.join(map(repr, option.choices), ", ")
raise OptionValueError(
_("option %s: invalid choice: %r (choose from %s)")
% (opt, value, choices))
# Not supplying a default is different from a default of None,
# so we need an explicit "not supplied" value.
NO_DEFAULT = ("NO", "DEFAULT")
class Option:
"""
Instance attributes:
_short_opts : [string]
_long_opts : [string]
action : string
type : string
dest : string
default : any
nargs : int
const : any
choices : [string]
callback : function
callback_args : (any*)
callback_kwargs : { string : any }
help : string
metavar : string
"""
# The list of instance attributes that may be set through
# keyword args to the constructor.
ATTRS = ['action',
'type',
'dest',
'default',
'nargs',
'const',
'choices',
'callback',
'callback_args',
'callback_kwargs',
'help',
'metavar']
# The set of actions allowed by option parsers. Explicitly listed
# here so the constructor can validate its arguments.
ACTIONS = ("store",
"store_const",
"store_true",
"store_false",
"append",
"append_const",
"count",
"callback",
"help",
"version")
# The set of actions that involve storing a value somewhere;
# also listed just for constructor argument validation. (If
# the action is one of these, there must be a destination.)
STORE_ACTIONS = ("store",
"store_const",
"store_true",
"store_false",
"append",
"append_const",
"count")
# The set of actions for which it makes sense to supply a value
# type, ie. which may consume an argument from the command line.
TYPED_ACTIONS = ("store",
"append",
"callback")
# The set of actions which *require* a value type, ie. that
# always consume an argument from the command line.
ALWAYS_TYPED_ACTIONS = ("store",
"append")
# The set of actions which take a 'const' attribute.
CONST_ACTIONS = ("store_const",
"append_const")
# The set of known types for option parsers. Again, listed here for
# constructor argument validation.
TYPES = ("string", "int", "long", "float", "complex", "choice")
# Dictionary of argument checking functions, which convert and
# validate option arguments according to the option type.
#
# Signature of checking functions is:
# check(option : Option, opt : string, value : string) -> any
# where
# option is the Option instance calling the checker
# opt is the actual option seen on the command-line
# (eg. "-a", "--file")
# value is the option argument seen on the command-line
#
# The return value should be in the appropriate Python type
# for option.type -- eg. an integer if option.type == "int".
#
# If no checker is defined for a type, arguments will be
# unchecked and remain strings.
TYPE_CHECKER = { "int" : check_builtin,
"long" : check_builtin,
"float" : check_builtin,
"complex": check_builtin,
"choice" : check_choice,
}
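    # Hypothetical example of a custom checker matching the signature
    # documented above (an illustration, not part of this file) -- a
    # subclass could register it under a new "date" type:
    #
    #     def check_date(option, opt, value):
    #         try:
    #             return datetime.date(*time.strptime(value, "%Y-%m-%d")[0:3])
    #         except ValueError:
    #             raise OptionValueError(
    #                 "option %s: invalid date: %r" % (opt, value))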
# CHECK_METHODS is a list of unbound method objects; they are called
# by the constructor, in order, after all attributes are
# initialized. The list is created and filled in later, after all
# the methods are actually defined. (I just put it here because I
# like to define and document all class attributes in the same
# place.) Subclasses that add another _check_*() method should
# define their own CHECK_METHODS list that adds their check method
# to those from this class.
CHECK_METHODS = None
# -- Constructor/initialization methods ----------------------------
def __init__(self, *opts, **attrs):
# Set _short_opts, _long_opts attrs from 'opts' tuple.
# Have to be set now, in case no option strings are supplied.
self._short_opts = []
self._long_opts = []
opts = self._check_opt_strings(opts)
self._set_opt_strings(opts)
# Set all other attrs (action, type, etc.) from 'attrs' dict
self._set_attrs(attrs)
# Check all the attributes we just set. There are lots of
# complicated interdependencies, but luckily they can be farmed
# out to the _check_*() methods listed in CHECK_METHODS -- which
# could be handy for subclasses! The one thing these all share
# is that they raise OptionError if they discover a problem.
for checker in self.CHECK_METHODS:
checker(self)
def _check_opt_strings(self, opts):
# Filter out None because early versions of Optik had exactly
# one short option and one long option, either of which
# could be None.
opts = filter(None, opts)
if not opts:
raise TypeError("at least one option string must be supplied")
return opts
def _set_opt_strings(self, opts):
for opt in opts:
if len(opt) < 2:
raise OptionError(
"invalid option string %r: "
"must be at least two characters long" % opt, self)
elif len(opt) == 2:
if not (opt[0] == "-" and opt[1] != "-"):
raise OptionError(
"invalid short option string %r: "
"must be of the form -x, (x any non-dash char)" % opt,
self)
self._short_opts.append(opt)
else:
if not (opt[0:2] == "--" and opt[2] != "-"):
raise OptionError(
"invalid long option string %r: "
"must start with --, followed by non-dash" % opt,
self)
self._long_opts.append(opt)
def _set_attrs(self, attrs):
for attr in self.ATTRS:
if attrs.has_key(attr):
setattr(self, attr, attrs[attr])
del attrs[attr]
else:
if attr == 'default':
setattr(self, attr, NO_DEFAULT)
else:
setattr(self, attr, None)
if attrs:
attrs = attrs.keys()
attrs.sort()
raise OptionError(
"invalid keyword arguments: %s" % string.join(attrs, ", "),
self)
# -- Constructor validation methods --------------------------------
def _check_action(self):
if self.action is None:
self.action = "store"
elif self.action not in self.ACTIONS:
raise OptionError("invalid action: %r" % self.action, self)
def _check_type(self):
if self.type is None:
if self.action in self.ALWAYS_TYPED_ACTIONS:
if self.choices is not None:
# The "choices" attribute implies "choice" type.
self.type = "choice"
else:
# No type given? "string" is the most sensible default.
self.type = "string"
else:
# Allow type objects or builtin type conversion functions
# (int, str, etc.) as an alternative to their names. (The
# complicated check of __builtin__ is only necessary for
# Python 2.1 and earlier, and is short-circuited by the
# first check on modern Pythons.)
import __builtin__
if ( type(self.type) is types.TypeType or
(hasattr(self.type, "__name__") and
getattr(__builtin__, self.type.__name__, None) is self.type) ):
self.type = self.type.__name__
if self.type == "str":
self.type = "string"
if self.type not in self.TYPES:
raise OptionError("invalid option type: %r" % self.type, self)
if self.action not in self.TYPED_ACTIONS:
raise OptionError(
"must not supply a type for action %r" % self.action, self)
def _check_choice(self):
if self.type == "choice":
if self.choices is None:
raise OptionError(
"must supply a list of choices for type 'choice'", self)
elif type(self.choices) not in (types.TupleType, types.ListType):
raise OptionError(
"choices must be a list of strings ('%s' supplied)"
% string.split(str(type(self.choices)), "'")[1], self)
elif self.choices is not None:
raise OptionError(
"must not supply choices for type %r" % self.type, self)
def _check_dest(self):
# No destination given, and we need one for this action. The
# self.type check is for callbacks that take a value.
takes_value = (self.action in self.STORE_ACTIONS or
self.type is not None)
if self.dest is None and takes_value:
# Glean a destination from the first long option string,
# or from the first short option string if no long options.
if self._long_opts:
# eg. "--foo-bar" -> "foo_bar"
self.dest = string.replace(self._long_opts[0][2:], '-', '_')
else:
self.dest = self._short_opts[0][1]
def _check_const(self):
if self.action not in self.CONST_ACTIONS and self.const is not None:
raise OptionError(
"'const' must not be supplied for action %r" % self.action,
self)
def _check_nargs(self):
if self.action in self.TYPED_ACTIONS:
if self.nargs is None:
self.nargs = 1
elif self.nargs is not None:
raise OptionError(
"'nargs' must not be supplied for action %r" % self.action,
self)
def _check_callback(self):
if self.action == "callback":
if not callable(self.callback):
raise OptionError(
"callback not callable: %r" % self.callback, self)
if (self.callback_args is not None and
type(self.callback_args) is not types.TupleType):
raise OptionError(
"callback_args, if supplied, must be a tuple: not %r"
% self.callback_args, self)
if (self.callback_kwargs is not None and
type(self.callback_kwargs) is not types.DictType):
raise OptionError(
"callback_kwargs, if supplied, must be a dict: not %r"
% self.callback_kwargs, self)
else:
if self.callback is not None:
raise OptionError(
"callback supplied (%r) for non-callback option"
% self.callback, self)
if self.callback_args is not None:
raise OptionError(
"callback_args supplied for non-callback option", self)
if self.callback_kwargs is not None:
raise OptionError(
"callback_kwargs supplied for non-callback option", self)
CHECK_METHODS = [_check_action,
_check_type,
_check_choice,
_check_dest,
_check_const,
_check_nargs,
_check_callback]
# -- Miscellaneous methods -----------------------------------------
def __str__(self):
return string.join(self._short_opts + self._long_opts, "/")
__repr__ = _repr
def takes_value(self):
return self.type is not None
def get_opt_string(self):
if self._long_opts:
return self._long_opts[0]
else:
return self._short_opts[0]
# -- Processing methods --------------------------------------------
def check_value(self, opt, value):
checker = self.TYPE_CHECKER.get(self.type)
if checker is None:
return value
else:
return checker(self, opt, value)
def convert_value(self, opt, value):
if value is not None:
if self.nargs == 1:
return self.check_value(opt, value)
else:
return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value))
def process(self, opt, value, values, parser):
# First, convert the value(s) to the right type. Howl if any
# value(s) are bogus.
value = self.convert_value(opt, value)
# And then take whatever action is expected of us.
# This is a separate method to make life easier for
# subclasses to add new actions.
return self.take_action(
self.action, self.dest, opt, value, values, parser)
def take_action(self, action, dest, opt, value, values, parser):
if action == "store":
setattr(values, dest, value)
elif action == "store_const":
setattr(values, dest, self.const)
elif action == "store_true":
setattr(values, dest, True)
elif action == "store_false":
setattr(values, dest, False)
elif action == "append":
values.ensure_value(dest, []).append(value)
elif action == "append_const":
values.ensure_value(dest, []).append(self.const)
elif action == "count":
setattr(values, dest, values.ensure_value(dest, 0) + 1)
elif action == "callback":
args = self.callback_args or ()
kwargs = self.callback_kwargs or {}
apply(self.callback, (self, opt, value, parser,) + args, kwargs)
elif action == "help":
parser.print_help()
parser.exit()
elif action == "version":
parser.print_version()
parser.exit()
else:
raise RuntimeError, "unknown action %r" % self.action
return 1
# class Option
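# Illustrative sketch (an assumption, not in the generated file): the
# take_action() hook above is the extension point for new actions. A
# subclass adding an "extend" action that splits comma-separated values
# might look like:
#
#     class MyOption(Option):
#         ACTIONS = Option.ACTIONS + ("extend",)
#         STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
#         TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
#         ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
#
#         def take_action(self, action, dest, opt, value, values, parser):
#             if action == "extend":
#                 values.ensure_value(dest, []).extend(string.split(value, ","))
#             else:
#                 Option.take_action(
#                     self, action, dest, opt, value, values, parser)
#
# The parser would then be created with OptionParser(option_class=MyOption).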
SUPPRESS_HELP = "SUPPRESS"+"HELP"
SUPPRESS_USAGE = "SUPPRESS"+"USAGE"
# For compatibility with Python 2.2
try:
True, False
except NameError:
(True, False) = (1, 0)
try:
types.UnicodeType
except AttributeError:
def isbasestring(x):
return isinstance(x, types.StringType)
else:
def isbasestring(x):
return isinstance(x, types.StringType) or isinstance(x, types.UnicodeType)
class Values:
def __init__(self, defaults=None):
if defaults:
for (attr, val) in defaults.items():
setattr(self, attr, val)
def __str__(self):
return str(self.__dict__)
__repr__ = _repr
def __cmp__(self, other):
if isinstance(other, Values):
return cmp(self.__dict__, other.__dict__)
elif isinstance(other, types.DictType):
return cmp(self.__dict__, other)
else:
return -1
def _update_careful(self, dict):
"""
Update the option values from an arbitrary dictionary, but only
use keys from dict that already have a corresponding attribute
in self. Any keys in dict without a corresponding attribute
are silently ignored.
"""
for attr in dir(self):
if dict.has_key(attr):
dval = dict[attr]
if dval is not None:
setattr(self, attr, dval)
def _update_loose(self, dict):
"""
Update the option values from an arbitrary dictionary,
using all keys from the dictionary regardless of whether
they have a corresponding attribute in self or not.
"""
self.__dict__.update(dict)
def _update(self, dict, mode):
if mode == "careful":
self._update_careful(dict)
elif mode == "loose":
self._update_loose(dict)
else:
raise ValueError, "invalid update mode: %r" % mode
def read_module(self, modname, mode="careful"):
__import__(modname)
mod = sys.modules[modname]
self._update(vars(mod), mode)
def read_file(self, filename, mode="careful"):
vars = {}
execfile(filename, vars)
self._update(vars, mode)
def ensure_value(self, attr, value):
if not hasattr(self, attr) or getattr(self, attr) is None:
setattr(self, attr, value)
return getattr(self, attr)
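# Usage sketch (illustrative): Values is a plain attribute bag with helpers.
#
#     v = Values({'verbose': True})
#     v.verbose                    # -> True
#     v.ensure_value('count', 0)   # -> 0, and sets v.count as a side effect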
class OptionContainer:
"""
Abstract base class.
Class attributes:
standard_option_list : [Option]
list of standard options that will be accepted by all instances
of this parser class (intended to be overridden by subclasses).
Instance attributes:
option_list : [Option]
the list of Option objects contained by this OptionContainer
_short_opt : { string : Option }
dictionary mapping short option strings, eg. "-f" or "-X",
to the Option instances that implement them. If an Option
        has multiple short option strings, it will appear in this
dictionary multiple times. [1]
_long_opt : { string : Option }
dictionary mapping long option strings, eg. "--file" or
"--exclude", to the Option instances that implement them.
Again, a given Option can occur multiple times in this
dictionary. [1]
defaults : { string : any }
dictionary mapping option destination names to default
values for each destination [1]
[1] These mappings are common to (shared by) all components of the
controlling OptionParser, where they are initially created.
"""
def __init__(self, option_class, conflict_handler, description):
# Initialize the option list and related data structures.
# This method must be provided by subclasses, and it must
# initialize at least the following instance attributes:
# option_list, _short_opt, _long_opt, defaults.
self._create_option_list()
self.option_class = option_class
self.set_conflict_handler(conflict_handler)
self.set_description(description)
def _create_option_mappings(self):
# For use by OptionParser constructor -- create the master
# option mappings used by this OptionParser and all
# OptionGroups that it owns.
self._short_opt = {} # single letter -> Option instance
self._long_opt = {} # long option -> Option instance
self.defaults = {} # maps option dest -> default value
def _share_option_mappings(self, parser):
# For use by OptionGroup constructor -- use shared option
# mappings from the OptionParser that owns this OptionGroup.
self._short_opt = parser._short_opt
self._long_opt = parser._long_opt
self.defaults = parser.defaults
def set_conflict_handler(self, handler):
if handler not in ("error", "resolve"):
raise ValueError, "invalid conflict_resolution value %r" % handler
self.conflict_handler = handler
def set_description(self, description):
self.description = description
def get_description(self):
return self.description
def destroy(self):
"""see OptionParser.destroy()."""
del self._short_opt
del self._long_opt
del self.defaults
# -- Option-adding methods -----------------------------------------
def _check_conflict(self, option):
conflict_opts = []
for opt in option._short_opts:
if self._short_opt.has_key(opt):
conflict_opts.append((opt, self._short_opt[opt]))
for opt in option._long_opts:
if self._long_opt.has_key(opt):
conflict_opts.append((opt, self._long_opt[opt]))
if conflict_opts:
handler = self.conflict_handler
if handler == "error":
raise OptionConflictError(
"conflicting option string(s): %s"
% string.join(map(lambda co: co[0], conflict_opts), ", "),
option)
elif handler == "resolve":
for (opt, c_option) in conflict_opts:
if opt[:2] == "--":
c_option._long_opts.remove(opt)
del self._long_opt[opt]
else:
c_option._short_opts.remove(opt)
del self._short_opt[opt]
if not (c_option._short_opts or c_option._long_opts):
c_option.container.option_list.remove(c_option)
def add_option(self, *args, **kwargs):
"""add_option(Option)
add_option(opt_str, ..., kwarg=val, ...)
"""
if type(args[0]) is types.StringType:
option = apply(self.option_class, args, kwargs)
elif len(args) == 1 and not kwargs:
option = args[0]
if not isinstance(option, Option):
raise TypeError, "not an Option instance: %r" % option
else:
raise TypeError, "invalid arguments"
self._check_conflict(option)
self.option_list.append(option)
option.container = self
for opt in option._short_opts:
self._short_opt[opt] = option
for opt in option._long_opts:
self._long_opt[opt] = option
if option.dest is not None: # option has a dest, we need a default
if option.default is not NO_DEFAULT:
self.defaults[option.dest] = option.default
elif not self.defaults.has_key(option.dest):
self.defaults[option.dest] = None
return option
def add_options(self, option_list):
for option in option_list:
self.add_option(option)
# -- Option query/removal methods ----------------------------------
def get_option(self, opt_str):
return (self._short_opt.get(opt_str) or
self._long_opt.get(opt_str))
def has_option(self, opt_str):
return (self._short_opt.has_key(opt_str) or
self._long_opt.has_key(opt_str))
def remove_option(self, opt_str):
option = self._short_opt.get(opt_str)
if option is None:
option = self._long_opt.get(opt_str)
if option is None:
raise ValueError("no such option %r" % opt_str)
for opt in option._short_opts:
del self._short_opt[opt]
for opt in option._long_opts:
del self._long_opt[opt]
option.container.option_list.remove(option)
# -- Help-formatting methods ---------------------------------------
def format_option_help(self, formatter):
if not self.option_list:
return ""
result = []
for option in self.option_list:
            if option.help is not SUPPRESS_HELP:
result.append(formatter.format_option(option))
return string.join(result, "")
def format_description(self, formatter):
return formatter.format_description(self.get_description())
def format_help(self, formatter):
result = []
if self.description:
result.append(self.format_description(formatter))
if self.option_list:
result.append(self.format_option_help(formatter))
return string.join(result, "\n")
class OptionGroup (OptionContainer):
def __init__(self, parser, title, description=None):
self.parser = parser
OptionContainer.__init__(
self, parser.option_class, parser.conflict_handler, description)
self.title = title
def _create_option_list(self):
self.option_list = []
self._share_option_mappings(self.parser)
def set_title(self, title):
self.title = title
def destroy(self):
"""see OptionParser.destroy()."""
OptionContainer.destroy(self)
del self.option_list
# -- Help-formatting methods ---------------------------------------
def format_help(self, formatter):
result = formatter.format_heading(self.title)
formatter.indent()
result = result + OptionContainer.format_help(self, formatter)
formatter.dedent()
return result
class OptionParser (OptionContainer):
"""
Class attributes:
standard_option_list : [Option]
list of standard options that will be accepted by all instances
of this parser class (intended to be overridden by subclasses).
Instance attributes:
usage : string
a usage string for your program. Before it is displayed
to the user, "%prog" will be expanded to the name of
your program (self.prog or os.path.basename(sys.argv[0])).
prog : string
the name of the current program (to override
os.path.basename(sys.argv[0])).
epilog : string
paragraph of help text to print after option help
option_groups : [OptionGroup]
list of option groups in this parser (option groups are
irrelevant for parsing the command-line, but very useful
for generating help)
allow_interspersed_args : bool = true
if true, positional arguments may be interspersed with options.
Assuming -a and -b each take a single argument, the command-line
-ablah foo bar -bboo baz
will be interpreted the same as
-ablah -bboo -- foo bar baz
If this flag were false, that command line would be interpreted as
-ablah -- foo bar -bboo baz
-- ie. we stop processing options as soon as we see the first
non-option argument. (This is the tradition followed by
Python's getopt module, Perl's Getopt::Std, and other argument-
parsing libraries, but it is generally annoying to users.)
process_default_values : bool = true
if true, option default values are processed similarly to option
values from the command line: that is, they are passed to the
type-checking function for the option's type (as long as the
default value is a string). (This really only matters if you
have defined custom types; see SF bug #955889.) Set it to false
to restore the behaviour of Optik 1.4.1 and earlier.
rargs : [string]
the argument list currently being parsed. Only set when
parse_args() is active, and continually trimmed down as
we consume arguments. Mainly there for the benefit of
callback options.
largs : [string]
the list of leftover arguments that we have skipped while
parsing options. If allow_interspersed_args is false, this
list is always empty.
values : Values
the set of option values currently being accumulated. Only
set when parse_args() is active. Also mainly for callbacks.
Because of the 'rargs', 'largs', and 'values' attributes,
OptionParser is not thread-safe. If, for some perverse reason, you
need to parse command-line arguments simultaneously in different
threads, use different OptionParser instances.
"""
standard_option_list = []
def __init__(self,
usage=None,
option_list=None,
option_class=Option,
version=None,
conflict_handler="error",
description=None,
formatter=None,
add_help_option=True,
prog=None,
epilog=None):
OptionContainer.__init__(
self, option_class, conflict_handler, description)
self.set_usage(usage)
self.prog = prog
self.version = version
self.allow_interspersed_args = True
self.process_default_values = True
if formatter is None:
formatter = IndentedHelpFormatter()
self.formatter = formatter
self.formatter.set_parser(self)
self.epilog = epilog
# Populate the option list; initial sources are the
# standard_option_list class attribute, the 'option_list'
# argument, and (if applicable) the _add_version_option() and
# _add_help_option() methods.
self._populate_option_list(option_list,
add_help=add_help_option)
self._init_parsing_state()
def destroy(self):
"""
Declare that you are done with this OptionParser. This cleans up
reference cycles so the OptionParser (and all objects referenced by
it) can be garbage-collected promptly. After calling destroy(), the
OptionParser is unusable.
"""
OptionContainer.destroy(self)
for group in self.option_groups:
group.destroy()
del self.option_list
del self.option_groups
del self.formatter
# -- Private methods -----------------------------------------------
# (used by our or OptionContainer's constructor)
def _create_option_list(self):
self.option_list = []
self.option_groups = []
self._create_option_mappings()
def _add_help_option(self):
self.add_option("-h", "--help",
action="help",
help=_("show this help message and exit"))
def _add_version_option(self):
self.add_option("--version",
action="version",
help=_("show program's version number and exit"))
def _populate_option_list(self, option_list, add_help=True):
if self.standard_option_list:
self.add_options(self.standard_option_list)
if option_list:
self.add_options(option_list)
if self.version:
self._add_version_option()
if add_help:
self._add_help_option()
def _init_parsing_state(self):
# These are set in parse_args() for the convenience of callbacks.
self.rargs = None
self.largs = None
self.values = None
# -- Simple modifier methods ---------------------------------------
def set_usage(self, usage):
if usage is None:
self.usage = _("%prog [options]")
elif usage is SUPPRESS_USAGE:
self.usage = None
# For backwards compatibility with Optik 1.3 and earlier.
elif string.lower(usage)[:7] == "usage: ":
self.usage = usage[7:]
else:
self.usage = usage
def enable_interspersed_args(self):
self.allow_interspersed_args = True
def disable_interspersed_args(self):
self.allow_interspersed_args = False
def set_process_default_values(self, process):
self.process_default_values = process
def set_default(self, dest, value):
self.defaults[dest] = value
def set_defaults(self, **kwargs):
self.defaults.update(kwargs)
def _get_all_options(self):
options = self.option_list[:]
for group in self.option_groups:
options.extend(group.option_list)
return options
def get_default_values(self):
if not self.process_default_values:
# Old, pre-Optik 1.5 behaviour.
return Values(self.defaults)
defaults = self.defaults.copy()
for option in self._get_all_options():
default = defaults.get(option.dest)
if isbasestring(default):
opt_str = option.get_opt_string()
defaults[option.dest] = option.check_value(opt_str, default)
return Values(defaults)
# -- OptionGroup methods -------------------------------------------
def add_option_group(self, *args, **kwargs):
# XXX lots of overlap with OptionContainer.add_option()
if type(args[0]) is types.StringType:
group = apply(OptionGroup, (self,) + args, kwargs)
elif len(args) == 1 and not kwargs:
group = args[0]
if not isinstance(group, OptionGroup):
raise TypeError, "not an OptionGroup instance: %r" % group
if group.parser is not self:
raise ValueError, "invalid OptionGroup (wrong parser)"
else:
raise TypeError, "invalid arguments"
self.option_groups.append(group)
return group
def get_option_group(self, opt_str):
option = (self._short_opt.get(opt_str) or
self._long_opt.get(opt_str))
if option and option.container is not self:
return option.container
return None
# -- Option-parsing methods ----------------------------------------
def _get_args(self, args):
if args is None:
return sys.argv[1:]
else:
return args[:] # don't modify caller's list
def parse_args(self, args=None, values=None):
"""
parse_args(args : [string] = sys.argv[1:],
values : Values = None)
-> (values : Values, args : [string])
Parse the command-line options found in 'args' (default:
sys.argv[1:]). Any errors result in a call to 'error()', which
by default prints the usage message to stderr and calls
sys.exit() with an error message. On success returns a pair
        (values, args) where 'values' is a Values instance (with all
your option values) and 'args' is the list of arguments left
over after parsing options.
"""
rargs = self._get_args(args)
if values is None:
values = self.get_default_values()
# Store the halves of the argument list as attributes for the
# convenience of callbacks:
# rargs
# the rest of the command-line (the "r" stands for
# "remaining" or "right-hand")
# largs
# the leftover arguments -- ie. what's left after removing
# options and their arguments (the "l" stands for "leftover"
# or "left-hand")
self.rargs = rargs
self.largs = largs = []
self.values = values
try:
stop = self._process_args(largs, rargs, values)
except (BadOptionError, OptionValueError), err:
self.error(str(err))
args = largs + rargs
return self.check_values(values, args)
def check_values(self, values, args):
"""
check_values(values : Values, args : [string])
-> (values : Values, args : [string])
Check that the supplied option values and leftover arguments are
valid. Returns the option values and leftover arguments
(possibly adjusted, possibly completely new -- whatever you
like). Default implementation just returns the passed-in
values; subclasses may override as desired.
"""
return (values, args)
def _process_args(self, largs, rargs, values):
"""_process_args(largs : [string],
rargs : [string],
values : Values)
Process command-line arguments and populate 'values', consuming
options and arguments from 'rargs'. If 'allow_interspersed_args' is
false, stop at the first non-option argument. If true, accumulate any
interspersed non-option arguments in 'largs'.
"""
while rargs:
arg = rargs[0]
# We handle bare "--" explicitly, and bare "-" is handled by the
# standard arg handler since the short arg case ensures that the
# len of the opt string is greater than 1.
if arg == "--":
del rargs[0]
return
elif arg[0:2] == "--":
# process a single long option (possibly with value(s))
self._process_long_opt(rargs, values)
elif arg[:1] == "-" and len(arg) > 1:
# process a cluster of short options (possibly with
# value(s) for the last one only)
self._process_short_opts(rargs, values)
elif self.allow_interspersed_args:
largs.append(arg)
del rargs[0]
else:
return # stop now, leave this arg in rargs
# Say this is the original argument list:
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
# ^
# (we are about to process arg(i)).
#
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
# [arg0, ..., arg(i-1)] (any options and their arguments will have
# been removed from largs).
#
# The while loop will usually consume 1 or more arguments per pass.
# If it consumes 1 (eg. arg is an option that takes no arguments),
# then after _process_arg() is done the situation is:
#
# largs = subset of [arg0, ..., arg(i)]
# rargs = [arg(i+1), ..., arg(N-1)]
#
# If allow_interspersed_args is false, largs will always be
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
# not a very interesting subset!
def _match_long_opt(self, opt):
"""_match_long_opt(opt : string) -> string
Determine which long option string 'opt' matches, ie. which one
        it is an unambiguous abbreviation for. Raises BadOptionError if
'opt' doesn't unambiguously match any long option string.
"""
return _match_abbrev(opt, self._long_opt)
def _process_long_opt(self, rargs, values):
arg = rargs.pop(0)
# Value explicitly attached to arg? Pretend it's the next
# argument.
if "=" in arg:
(opt, next_arg) = string.split(arg, "=", 1)
rargs.insert(0, next_arg)
had_explicit_value = True
else:
opt = arg
had_explicit_value = False
opt = self._match_long_opt(opt)
option = self._long_opt[opt]
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
if nargs == 1:
self.error(_("%s option requires an argument") % opt)
else:
self.error(_("%s option requires %d arguments")
% (opt, nargs))
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
elif had_explicit_value:
self.error(_("%s option does not take a value") % opt)
else:
value = None
option.process(opt, value, values, self)
def _process_short_opts(self, rargs, values):
arg = rargs.pop(0)
stop = False
i = 1
for ch in arg[1:]:
opt = "-" + ch
option = self._short_opt.get(opt)
i = i + 1 # we have consumed a character
if not option:
raise BadOptionError(opt)
if option.takes_value():
# Any characters left in arg? Pretend they're the
# next arg, and stop consuming characters of arg.
if i < len(arg):
rargs.insert(0, arg[i:])
stop = True
nargs = option.nargs
if len(rargs) < nargs:
if nargs == 1:
self.error(_("%s option requires an argument") % opt)
else:
self.error(_("%s option requires %d arguments")
% (opt, nargs))
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
else: # option doesn't take a value
value = None
option.process(opt, value, values, self)
if stop:
break
# -- Feedback methods ----------------------------------------------
def get_prog_name(self):
if self.prog is None:
return os.path.basename(sys.argv[0])
else:
return self.prog
def expand_prog_name(self, s):
return string.replace(s, "%prog", self.get_prog_name())
def get_description(self):
return self.expand_prog_name(self.description)
def exit(self, status=0, msg=None):
if msg:
sys.stderr.write(msg)
sys.exit(status)
def error(self, msg):
"""error(msg : string)
Print a usage message incorporating 'msg' to stderr and exit.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
self.print_usage(sys.stderr)
self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg))
def get_usage(self):
if self.usage:
return self.formatter.format_usage(
self.expand_prog_name(self.usage))
else:
return ""
def print_usage(self, file=None):
"""print_usage(file : file = stdout)
Print the usage message for the current program (self.usage) to
        'file' (default stdout). Any occurrence of the string "%prog" in
self.usage is replaced with the name of the current program
(basename of sys.argv[0]). Does nothing if self.usage is empty
or not defined.
"""
        if self.usage:
            if file is None:
                file = sys.stdout
            file.write(self.get_usage() + '\n')
def get_version(self):
if self.version:
return self.expand_prog_name(self.version)
else:
return ""
def print_version(self, file=None):
"""print_version(file : file = stdout)
Print the version message for this program (self.version) to
        'file' (default stdout). As with print_usage(), any occurrence
of "%prog" in self.version is replaced by the current program's
name. Does nothing if self.version is empty or undefined.
"""
        if self.version:
            if file is None:
                file = sys.stdout
            file.write(self.get_version() + '\n')
def format_option_help(self, formatter=None):
if formatter is None:
formatter = self.formatter
formatter.store_option_strings(self)
result = []
result.append(formatter.format_heading(_("Options")))
formatter.indent()
if self.option_list:
result.append(OptionContainer.format_option_help(self, formatter))
result.append("\n")
for group in self.option_groups:
result.append(group.format_help(formatter))
result.append("\n")
formatter.dedent()
# Drop the last "\n", or the header if no options or option groups:
return string.join(result[:-1], "")
def format_epilog(self, formatter):
return formatter.format_epilog(self.epilog)
def format_help(self, formatter=None):
if formatter is None:
formatter = self.formatter
result = []
if self.usage:
result.append(self.get_usage() + "\n")
if self.description:
result.append(self.format_description(formatter) + "\n")
result.append(self.format_option_help(formatter))
result.append(self.format_epilog(formatter))
return string.join(result, "")
# used by test suite
def _get_encoding(self, file):
encoding = getattr(file, "encoding", None)
if not encoding:
encoding = sys.getdefaultencoding()
return encoding
def print_help(self, file=None):
"""print_help(file : file = stdout)
Print an extended help message, listing all options and any
help text provided with them, to 'file' (default stdout).
"""
if file is None:
file = sys.stdout
encoding = self._get_encoding(file)
file.write(encode_wrapper(self.format_help(), encoding, "replace"))
# class OptionParser
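# End-to-end usage sketch (illustrative, not part of the generated file):
#
#     parser = OptionParser(usage="%prog [options] arg1 arg2",
#                           version="%prog 1.0")
#     parser.add_option("-f", "--file", dest="filename",
#                       help="write report to FILE", metavar="FILE")
#     parser.add_option("-q", "--quiet", action="store_false",
#                       dest="verbose", default=True,
#                       help="don't print status messages to stdout")
#     (options, args) = parser.parse_args()
#
# parse_args() returns a Values instance holding the typed option values
# plus the list of leftover positional arguments.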
def _match_abbrev(s, wordmap):
"""_match_abbrev(s : string, wordmap : {string : Option}) -> string
Return the string key in 'wordmap' for which 's' is an unambiguous
abbreviation. If 's' is found to be ambiguous or doesn't match any of
    the keys of 'wordmap', raise BadOptionError.
"""
# Is there an exact match?
if wordmap.has_key(s):
return s
else:
# Isolate all words with s as a prefix.
possibilities = filter(lambda w, s=s: w[:len(s)] == s, wordmap.keys())
# No exact match, so there had better be just one possibility.
if len(possibilities) == 1:
return possibilities[0]
elif not possibilities:
raise BadOptionError(s)
else:
# More than one possible completion: ambiguous prefix.
possibilities.sort()
raise AmbiguousOptionError(s, possibilities)
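# Behaviour sketch (illustrative): given long options --verbose and
# --version in 'wordmap',
#     _match_abbrev("--verb", wordmap)  returns "--verbose" (unique prefix),
#     _match_abbrev("--ver", wordmap)   raises AmbiguousOptionError, and
#     _match_abbrev("--bogus", wordmap) raises BadOptionError.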
# Some day, there might be many Option classes. As of Optik 1.3, the
# preferred way to instantiate Options is indirectly, via make_option(),
# which will become a factory function when there are many Option
# classes.
make_option = Option
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
anryko/ansible | refs/heads/devel | test/units/module_utils/basic/test_safe_eval.py | 134 | # -*- coding: utf-8 -*-
# (c) 2015-2017, Toshio Kuratomi <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from itertools import chain
import pytest
# Strings that should be converted into a typed value
VALID_STRINGS = (
("'a'", 'a'),
("'1'", '1'),
("1", 1),
("True", True),
("False", False),
("{}", {}),
)
# Passing things that aren't strings should just return the object
NONSTRINGS = (
({'a': 1}, {'a': 1}),
)
# These strings are not basic types. For security, they must not be
# evaluated: safe_eval returns the original string unchanged, and for some
# inputs also reports an exception (see the *_with_exceptions tests below).
INVALID_STRINGS = (
("a=1", "a=1", SyntaxError),
("a.foo()", "a.foo()", None),
("import foo", "import foo", None),
("__import__('foo')", "__import__('foo')", ValueError),
)
@pytest.mark.parametrize('code, expected, stdin',
((c, e, {}) for c, e in chain(VALID_STRINGS, NONSTRINGS)),
indirect=['stdin'])
def test_simple_types(am, code, expected):
# test some basic usage for various types
assert am.safe_eval(code) == expected
@pytest.mark.parametrize('code, expected, stdin',
((c, e, {}) for c, e in chain(VALID_STRINGS, NONSTRINGS)),
indirect=['stdin'])
def test_simple_types_with_exceptions(am, code, expected):
# Test simple types with exceptions requested
    assert am.safe_eval(code, include_exceptions=True) == (expected, None)
@pytest.mark.parametrize('code, expected, stdin',
((c, e, {}) for c, e, dummy in INVALID_STRINGS),
indirect=['stdin'])
def test_invalid_strings(am, code, expected):
assert am.safe_eval(code) == expected
@pytest.mark.parametrize('code, expected, exception, stdin',
((c, e, ex, {}) for c, e, ex in INVALID_STRINGS),
indirect=['stdin'])
def test_invalid_strings_with_exceptions(am, code, expected, exception):
res = am.safe_eval(code, include_exceptions=True)
assert res[0] == expected
if exception is None:
assert res[1] == exception
else:
assert type(res[1]) == exception
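# Illustrative expectations (an assumption about AnsibleModule.safe_eval
# semantics, matching the tables above): safe_eval only literal-evals basic
# types, so
#     am.safe_eval("True")  -> True
#     am.safe_eval("a=1")   -> "a=1"   (statements come back untouched)
#     am.safe_eval("a=1", include_exceptions=True)
#         -> ("a=1", SyntaxError(...)) per INVALID_STRINGS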
|
nikitabrazhnik/flask2 | refs/heads/master | Module 3/Chapter12/chapter_12/webapp/controllers/rest/post.py | 11 | import datetime
from flask import abort
from flask.ext.restful import Resource, fields, marshal_with
from webapp.models import db, User, Post, Tag
from .parsers import (
post_get_parser,
post_post_parser,
post_put_parser,
post_delete_parser
)
from .fields import HTMLField
nested_tag_fields = {
'id': fields.Integer(),
'title': fields.String()
}
post_fields = {
'id': fields.Integer(),
'author': fields.String(attribute=lambda x: x.user.username),
'title': fields.String(),
'text': HTMLField(),
'tags': fields.List(fields.Nested(nested_tag_fields)),
'publish_date': fields.DateTime(dt_format='iso8601')
}
class PostApi(Resource):
@marshal_with(post_fields)
def get(self, post_id=None):
if post_id:
post = Post.query.get(post_id)
if not post:
abort(404)
return post
else:
args = post_get_parser.parse_args()
page = args['page'] or 1
if args['user']:
user = User.query.filter_by(username=args['user']).first()
if not user:
abort(404)
posts = user.posts.order_by(
Post.publish_date.desc()
).paginate(page, 30)
else:
posts = Post.query.order_by(
Post.publish_date.desc()
).paginate(page, 30)
return posts.items
def post(self, post_id=None):
if post_id:
abort(400)
else:
args = post_post_parser.parse_args(strict=True)
user = User.verify_auth_token(args['token'])
if not user:
abort(401)
new_post = Post(args['title'])
new_post.user = user
            # Stamp the publication time on the field the serializer and
            # queries actually use.
            new_post.publish_date = datetime.datetime.now()
new_post.text = args['text']
if args['tags']:
for item in args['tags']:
tag = Tag.query.filter_by(title=item).first()
# Add the tag if it exists. If not, make a new tag
if tag:
new_post.tags.append(tag)
else:
new_tag = Tag(item)
new_post.tags.append(new_tag)
db.session.add(new_post)
db.session.commit()
return new_post.id, 201
def put(self, post_id=None):
if not post_id:
abort(400)
post = Post.query.get(post_id)
if not post:
abort(404)
args = post_put_parser.parse_args(strict=True)
user = User.verify_auth_token(args['token'])
if not user:
abort(401)
if user != post.user:
abort(403)
if args['title']:
post.title = args['title']
if args['text']:
post.text = args['text']
if args['tags']:
for item in args['tags']:
tag = Tag.query.filter_by(title=item).first()
# Add the tag if it exists. If not, make a new tag
if tag:
post.tags.append(tag)
else:
new_tag = Tag(item)
post.tags.append(new_tag)
db.session.add(post)
db.session.commit()
return post.id, 201
def delete(self, post_id=None):
if not post_id:
abort(400)
post = Post.query.get(post_id)
if not post:
abort(404)
args = post_delete_parser.parse_args(strict=True)
user = User.verify_auth_token(args['token'])
if user != post.user:
abort(401)
db.session.delete(post)
db.session.commit()
return "", 204
|