# repo: texttochange/vusion-backend | file: vusion/persist/schedule/tests/test_schedule.py | license: bsd-3-clause
"""Tests for vusion.persist.schedule."""
from datetime import timedelta, datetime
from twisted.trial.unittest import TestCase
from vusion.persist import schedule_generator, DialogueSchedule
from tests.utils import ObjectMaker
from vusion.utils import time_from_vusion_format, time_to_vusion_format
class TestSchedule(TestCase, ObjectMaker):
def test_instanciate(self):
sometime = time_from_vusion_format('2014-10-02T10:00:00')
schedule = DialogueSchedule(**self.mkobj_schedule(date_time=time_to_vusion_format(sometime)))
self.assertEqual('2014-10-02T10:00:00', schedule['date-time'])
schedule = DialogueSchedule(**self.mkobj_schedule(date_time=sometime))
self.assertEqual('2014-10-02T10:00:00', schedule['date-time'])
def test_is_expired(self):
now = datetime.now()
schedule = schedule_generator(**self.mkobj_schedule(
date_time=time_to_vusion_format(now)))
self.assertFalse(schedule.is_expired(now))
past = now - timedelta(minutes=61)
schedule = schedule_generator(**self.mkobj_schedule(
date_time=time_to_vusion_format(past)))
self.assertTrue(schedule.is_expired(now))
future = now + timedelta(minutes=15)
schedule = schedule_generator(**self.mkobj_schedule(
date_time=time_to_vusion_format(future)))
self.assertFalse(schedule.is_expired(now))

# repo: hanlind/nova | file: nova/tests/unit/api/openstack/compute/test_image_metadata.py | license: apache-2.0
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_serialization import jsonutils
import webob
from nova.api.openstack.compute import image_metadata as image_metadata_v21
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import image_fixtures
IMAGE_FIXTURES = image_fixtures.get_image_fixtures()
CHK_QUOTA_STR = 'nova.api.openstack.common.check_img_metadata_properties_quota'
def get_image_123():
return copy.deepcopy(IMAGE_FIXTURES)[0]
class ImageMetaDataTestV21(test.NoDBTestCase):
controller_class = image_metadata_v21.ImageMetadataController
invalid_request = exception.ValidationError
def setUp(self):
super(ImageMetaDataTestV21, self).setUp()
self.controller = self.controller_class()
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_index(self, get_all_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata')
res_dict = self.controller.index(req, '123')
expected = {'metadata': {'key1': 'value1'}}
self.assertEqual(res_dict, expected)
get_all_mocked.assert_called_once_with(mock.ANY, '123')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_show(self, get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
res_dict = self.controller.show(req, '123', 'key1')
self.assertIn('meta', res_dict)
self.assertEqual(len(res_dict['meta']), 1)
self.assertEqual('value1', res_dict['meta']['key1'])
get_mocked.assert_called_once_with(mock.ANY, '123')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_show_not_found(self, _get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key9')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, req, '123', 'key9')
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotFound(image_id='100'))
def test_show_image_not_found(self, _get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/100/metadata/key1')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, req, '100', 'key9')
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_create(self, get_mocked, update_mocked, quota_mocked):
mock_result = copy.deepcopy(get_image_123())
mock_result['properties']['key7'] = 'value7'
update_mocked.return_value = mock_result
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata')
req.method = 'POST'
body = {"metadata": {"key7": "value7"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, '123', body=body)
get_mocked.assert_called_once_with(mock.ANY, '123')
expected = copy.deepcopy(get_image_123())
expected['properties'] = {
'key1': 'value1', # existing meta
'key7': 'value7' # new meta
}
quota_mocked.assert_called_once_with(mock.ANY, expected["properties"])
update_mocked.assert_called_once_with(mock.ANY, '123', expected,
data=None, purge_props=True)
expected_output = {'metadata': {'key1': 'value1', 'key7': 'value7'}}
self.assertEqual(expected_output, res)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotFound(image_id='100'))
def test_create_image_not_found(self, _get_mocked, update_mocked,
quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/100/metadata')
req.method = 'POST'
body = {"metadata": {"key7": "value7"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.create, req, '100', body=body)
self.assertFalse(quota_mocked.called)
self.assertFalse(update_mocked.called)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_update_all(self, get_mocked, update_mocked, quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata')
req.method = 'PUT'
body = {"metadata": {"key9": "value9"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
res = self.controller.update_all(req, '123', body=body)
get_mocked.assert_called_once_with(mock.ANY, '123')
expected = copy.deepcopy(get_image_123())
expected['properties'] = {
'key9': 'value9' # replace meta
}
quota_mocked.assert_called_once_with(mock.ANY, expected["properties"])
update_mocked.assert_called_once_with(mock.ANY, '123', expected,
data=None, purge_props=True)
expected_output = {'metadata': {'key9': 'value9'}}
self.assertEqual(expected_output, res)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotFound(image_id='100'))
def test_update_all_image_not_found(self, _get_mocked, quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/100/metadata')
req.method = 'PUT'
body = {"metadata": {"key9": "value9"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update_all, req, '100', body=body)
self.assertFalse(quota_mocked.called)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_update_item(self, _get_mocked, update_mocked, quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
req.method = 'PUT'
body = {"meta": {"key1": "zz"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
res = self.controller.update(req, '123', 'key1', body=body)
expected = copy.deepcopy(get_image_123())
expected['properties'] = {
'key1': 'zz' # changed meta
}
quota_mocked.assert_called_once_with(mock.ANY, expected["properties"])
update_mocked.assert_called_once_with(mock.ANY, '123', expected,
data=None, purge_props=True)
expected_output = {'meta': {'key1': 'zz'}}
self.assertEqual(res, expected_output)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotFound(image_id='100'))
def test_update_item_image_not_found(self, _get_mocked, quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/100/metadata/key1')
req.method = 'PUT'
body = {"meta": {"key1": "zz"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update, req, '100', 'key1',
body=body)
self.assertFalse(quota_mocked.called)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get')
def test_update_item_bad_body(self, get_mocked, update_mocked,
quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
req.method = 'PUT'
body = {"key1": "zz"}
req.body = b''
req.headers["content-type"] = "application/json"
self.assertRaises(self.invalid_request,
self.controller.update, req, '123', 'key1',
body=body)
self.assertFalse(get_mocked.called)
self.assertFalse(quota_mocked.called)
self.assertFalse(update_mocked.called)
@mock.patch(CHK_QUOTA_STR,
side_effect=webob.exc.HTTPBadRequest())
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get')
def test_update_item_too_many_keys(self, get_mocked, update_mocked,
_quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
req.method = 'PUT'
body = {"meta": {"foo": "bar"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, '123', 'key1',
body=body)
self.assertFalse(get_mocked.called)
self.assertFalse(update_mocked.called)
@mock.patch(CHK_QUOTA_STR)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_update_item_body_uri_mismatch(self, _get_mocked, update_mocked,
quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/bad')
req.method = 'PUT'
body = {"meta": {"key1": "value1"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, '123', 'bad',
body=body)
self.assertFalse(quota_mocked.called)
self.assertFalse(update_mocked.called)
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_delete(self, _get_mocked, update_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
req.method = 'DELETE'
res = self.controller.delete(req, '123', 'key1')
expected = copy.deepcopy(get_image_123())
expected['properties'] = {}
update_mocked.assert_called_once_with(mock.ANY, '123', expected,
data=None, purge_props=True)
self.assertIsNone(res)
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_delete_not_found(self, _get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/blah')
req.method = 'DELETE'
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, req, '123', 'blah')
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotFound(image_id='100'))
def test_delete_image_not_found(self, _get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/100/metadata/key1')
req.method = 'DELETE'
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, req, '100', 'key1')
@mock.patch(CHK_QUOTA_STR,
side_effect=webob.exc.HTTPForbidden(explanation=''))
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_too_many_metadata_items_on_create(self, _get_mocked,
update_mocked, _quota_mocked):
body = {"metadata": {"foo": "bar"}}
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata')
req.method = 'POST'
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, req, '123', body=body)
self.assertFalse(update_mocked.called)
@mock.patch(CHK_QUOTA_STR,
side_effect=webob.exc.HTTPForbidden(explanation=''))
@mock.patch('nova.image.api.API.update')
@mock.patch('nova.image.api.API.get', return_value=get_image_123())
def test_too_many_metadata_items_on_put(self, _get_mocked,
update_mocked, _quota_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/blah')
req.method = 'PUT'
body = {"meta": {"blah": "blah", "blah1": "blah1"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(self.invalid_request,
self.controller.update, req, '123', 'blah',
body=body)
self.assertFalse(update_mocked.called)
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotAuthorized(image_id='123'))
def test_image_not_authorized_update(self, _get_mocked):
req = fakes.HTTPRequest.blank('/v2/fake/images/123/metadata/key1')
req.method = 'PUT'
body = {"meta": {"key1": "value1"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.update, req, '123', 'key1',
body=body)
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotAuthorized(image_id='123'))
def test_image_not_authorized_update_all(self, _get_mocked):
image_id = 131
# see nova.tests.unit.api.openstack.fakes:_make_image_fixtures
req = fakes.HTTPRequest.blank('/v2/fake/images/%s/metadata/key1'
% image_id)
req.method = 'PUT'
body = {"metadata": {"key1": "value1"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.update_all, req, image_id,
body=body)
@mock.patch('nova.image.api.API.get',
side_effect=exception.ImageNotAuthorized(image_id='123'))
def test_image_not_authorized_create(self, _get_mocked):
image_id = 131
# see nova.tests.unit.api.openstack.fakes:_make_image_fixtures
req = fakes.HTTPRequest.blank('/v2/fake/images/%s/metadata/key1'
% image_id)
req.method = 'POST'
body = {"metadata": {"key1": "value1"}}
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, req, image_id,
body=body)
class ImageMetadataControllerV239(test.NoDBTestCase):
def setUp(self):
super(ImageMetadataControllerV239, self).setUp()
self.controller = image_metadata_v21.ImageMetadataController()
self.req = fakes.HTTPRequest.blank('', version='2.39')
def test_not_found_for_all_image_metadata_api(self):
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.index, self.req)
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.show, self.req, fakes.FAKE_UUID)
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.create, self.req,
fakes.FAKE_UUID, {'metadata': {}})
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.update, self.req,
fakes.FAKE_UUID, 'id', {'metadata': {}})
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.update_all, self.req,
fakes.FAKE_UUID, {'metadata': {}})
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.delete, self.req, fakes.FAKE_UUID)

# repo: miltonruelas/cursotecnico | file: branch/l10n_pe_ple/__init__.py | license: agpl-3.0
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import sunat
import tables
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

# repo: Bertrand256/dash-masternode-tool | file: src/dashd_intf.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Bertrand256
# Created on: 2017-03
from __future__ import annotations
import decimal
import functools
import json
import os
import re
import socket
import ssl
import threading
import time
import datetime
import logging
from PyQt5.QtCore import QThread
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException, EncodeDecimal
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from paramiko import AuthenticationException, PasswordRequiredException, SSHException
from paramiko.ssh_exception import NoValidConnectionsError, BadAuthenticationType
from typing import List, Dict, Union, Callable, Optional
import app_cache
from app_config import AppConfig
from random import randint
from wnd_utils import WndUtils
import socketserver
import select
from psw_cache import SshPassCache
from common import AttrsProtected, CancelException
log = logging.getLogger('dmt.dashd_intf')
try:
import http.client as httplib
except ImportError:
import httplib
# number of seconds for which cached masternode data is valid; cached masternode data is used only
# for non-critical features
MASTERNODES_CACHE_VALID_SECONDS = 60 * 60 # 60 minutes
PROTX_CACHE_VALID_SECONDS = 60 * 60
class ForwardServer (socketserver.ThreadingTCPServer):
daemon_threads = True
allow_reuse_address = True
class Handler(socketserver.BaseRequestHandler):
def handle(self):
try:
log.debug('Handler, starting ssh_transport.open_channel')
chan = self.ssh_transport.open_channel(kind='direct-tcpip',
dest_addr=(self.chain_host, self.chain_port),
src_addr=self.request.getpeername())
log.debug('Handler, started ssh_transport.open_channel')
except Exception as e:
log.error('open_channel error: ' + str(e))
if self.broken_conn_callback is not None:
self.broken_conn_callback()
return
if chan is None:
return
try:
while True:
r, w, x = select.select([self.request, chan], [], [], 10)
if self.request in r:
data = self.request.recv(1024)
if len(data) == 0:
break
chan.send(data)
log.debug(f'SSH tunnel - sent {len(data)} bytes')
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
self.request.send(data)
log.debug(f'SSH tunnel - received {len(data)} bytes')
log.debug('Finishing Handler.handle')
except socket.error as e:
log.error('Handler socker.error occurred: ' + str(e))
except Exception as e:
log.error('Handler exception occurred: ' + str(e))
finally:
chan.close()
self.request.close()
class SSHTunnelThread(QThread):
def __init__(self, local_port, remote_ip, remote_port, transport, ready_event,
on_connection_broken_callback=None, on_finish_thread_callback=None):
QThread.__init__(self)
self.local_port = local_port
self.remote_ip = remote_ip
self.remote_port = remote_port
self.transport = transport
self.ready_event = ready_event
self.forward_server = None
self.on_connection_broken_callback = on_connection_broken_callback
self.on_finish_thread_callback = on_finish_thread_callback
self.setObjectName('SSHTunnelThread')
def __del__(self):
pass
def stop(self):
if self.forward_server:
self.forward_server.shutdown()
def handler_broken_connection_callback(self):
try:
self.stop()
if self.on_connection_broken_callback is not None:
self.on_connection_broken_callback()
except:
log.exception('Exception while shutting down forward server.')
def run(self):
class SubHandler(Handler):
chain_host = self.remote_ip
chain_port = self.remote_port
ssh_transport = self.transport
broken_conn_callback = self.handler_broken_connection_callback
try:
self.ready_event.set()
log.debug('Started SSHTunnelThread, local port forwarding 127.0.0.1:%s -> %s:%s' %
(str(self.local_port), self.remote_ip, str(self.remote_port)))
self.forward_server = ForwardServer(('127.0.0.1', self.local_port), SubHandler)
self.forward_server.serve_forever()
log.debug('Stopped local port forwarding 127.0.0.1:%s -> %s:%s' %
(str(self.local_port), self.remote_ip, str(self.remote_port)))
if self.on_finish_thread_callback:
self.on_finish_thread_callback()
except Exception as e:
log.exception('SSH tunnel exception occurred')
class UnknownError(Exception):
pass
class DashdConnectionError(Exception):
def __init__(self, org_exception):
Exception.__init__(org_exception)
self.org_exception = org_exception
class DashdSSH(object):
def __init__(self, host, port, username, on_connection_broken_callback=None, auth_method: str = 'password',
private_key_path: str = ''):
self.host = host
self.port = port
self.username = username
self.ssh = None
self.channel = None
self.fw_channel = None
self.connected = False
self.connection_broken = False
self.ssh_thread = None
self.auth_method = auth_method # 'any', 'password', 'key_pair', 'ssh_agent'
self.private_key_path = private_key_path
self.on_connection_broken_callback = on_connection_broken_callback
def __del__(self):
self.disconnect()
def remote_command(self, cmd):
channel = None
try:
channel = self.ssh.get_transport().open_session()
channel.exec_command(cmd)
ret_code = channel.recv_exit_status()
if ret_code == 0:
for idx in range(1, 20):
if channel.recv_ready():
break
time.sleep(0.1)
if not channel.recv_ready():
raise Exception('Data not ready')
data = channel.recv(500)
return data.decode().split('\n')
else:
for idx in range(1, 20):
if channel.recv_stderr_ready():
break
time.sleep(0.1)
if channel.recv_stderr_ready():
data = channel.recv_stderr(500)
error = data.decode()
raise Exception(error)
else:
raise UnknownError('Unknown error executing remote command: ' + cmd)
finally:
if channel:
channel.close()
def connect(self) -> bool:
import paramiko
if self.ssh is None:
self.ssh = paramiko.SSHClient()
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
password = None
pass_message = None
while True:
try:
if self.auth_method == 'any':
self.ssh.connect(self.host, port=int(self.port), username=self.username, password=password)
elif self.auth_method == 'password':
self.ssh.connect(self.host, port=int(self.port), username=self.username, password=password,
look_for_keys=False, allow_agent=False)
elif self.auth_method == 'key_pair':
if not self.private_key_path:
raise Exception('No RSA private key path was provided.')
self.ssh.connect(self.host, port=int(self.port), username=self.username, password=password,
key_filename=self.private_key_path, look_for_keys=False, allow_agent=False)
elif self.auth_method == 'ssh_agent':
self.ssh.connect(self.host, port=int(self.port), username=self.username, password=password,
look_for_keys=False, allow_agent=True)
self.connected = True
if password:
SshPassCache.save_password(self.username, self.host, password)
break
except PasswordRequiredException as e:
# private key with password protection is used; ask user for password
pass_message = "Enter passphrase for <b>private key</b> or password for %s" % \
(self.username + '@' + self.host)
while True:
password = SshPassCache.get_password(self.username, self.host, message=pass_message)
if password:
break
except BadAuthenticationType as e:
raise Exception(str(e))
except AuthenticationException as e:
# This exception will be raised in the following cases:
# 1. a private key with password protection is used but the user enters incorrect password
# 2. a private key exists but user's public key is not added to the server's allowed keys
# 3. normal login to server is performed but the user enters bad password
# So, in the first case, the second query for password will ask for normal password to server, not
# for a private key.
if self.auth_method == 'key_pair':
WndUtils.error_msg(message=f'Authentication failed for private key: {self.private_key_path} '
f'(username {self.username}).')
break
else:
if password is not None:
WndUtils.error_msg(message='Incorrect password, try again...')
while True:
password = SshPassCache.get_password(self.username, self.host, message=pass_message)
if password:
break
except SSHException as e:
if e.args and e.args[0] == 'No authentication methods available':
while True:
password = SshPassCache.get_password(self.username, self.host)
if password:
break
else:
raise
except Exception as e:
log.exception(str(e))
raise
return self.connected
def on_tunnel_thread_finish(self):
self.ssh_thread = None
def open_tunnel(self, local_port, remote_ip, remote_port):
if self.connected:
if self.ssh_thread is not None:
raise Exception('SSH tunnel already open.')
ready_event = threading.Event()
self.ssh_thread = SSHTunnelThread(local_port, remote_ip, remote_port, self.ssh.get_transport(), ready_event,
on_connection_broken_callback=self.on_connection_broken_callback,
on_finish_thread_callback=self.on_tunnel_thread_finish)
self.ssh_thread.start()
ready_event.wait(10)
# wait a moment for the tunnel to come up
time.sleep(0.1)
log.debug('Started local port forwarding 127.0.0.1:%s -> %s:%s' %
(str(local_port), remote_ip, str(remote_port)))
else:
raise Exception('SSH not connected')
def find_dashd_config(self):
"""
Try to read the configuration of the remote dash daemon. In particular, we need the parameters concerning its
RPC interface.
:return: tuple (dashd_running, dashd_config_found, dashd config file contents as dict)
or an error string if an error occurred.
"""
dashd_running = False
dashd_config_found = False
if not self.ssh:
raise Exception('SSH session not ready')
try:
# find dashd process id if running
try:
pids = self.remote_command('ps -C "dashd" -o pid')
except UnknownError:
raise Exception('is dashd running on the remote machine?')
pid = None
if isinstance(pids, list):
pids = [pid.strip() for pid in pids]
if len(pids) >= 2 and pids[0] == 'PID' and re.match(r'\d+', pids[1]):
pid = pids[1]
elif len(pids) >= 1 and re.match(r'\d+', pids[0]):
pid = pids[0]
config = {}
if pid:
dashd_running = True
# using dashd pid find its executable path and then .dashcore directory and finally dash.conf file
executables = self.remote_command('ls -l /proc/' + str(pid) + '/exe')
if executables and len(executables) >= 1:
elems = executables[0].split('->')
if len(elems) == 2:
executable = elems[1].strip()
dashd_dir = os.path.dirname(executable)
dash_conf_file = dashd_dir + '/.dashcore/dash.conf'
conf_lines = []
try:
conf_lines = self.remote_command('cat ' + dash_conf_file)
except Exception as e:
# probably error no such file or directory
# try to read dashd's cwd + cmdline
cwd_lines = self.remote_command('ls -l /proc/' + str(pid) + '/cwd')
if cwd_lines:
elems = cwd_lines[0].split('->')
if len(elems) >= 2:
cwd = elems[1]
dash_conf_file = cwd + '/.dashcore/dash.conf'
try:
conf_lines = self.remote_command('cat ' + dash_conf_file)
except Exception as e:
# second method did not succeed, so assume that the conf file is located
# in the /home/<username>/.dashcore directory
dash_conf_file = '/home/' + self.username + '/.dashcore/dash.conf'
conf_lines = self.remote_command('cat ' + dash_conf_file)
for line in conf_lines:
elems = [e.strip() for e in line.split('=')]
if len(elems) == 2:
config[elems[0]] = elems[1]
dashd_config_found = True
return dashd_running, dashd_config_found, config
except Exception as e:
return str(e)
def disconnect(self):
if self.ssh:
if self.ssh_thread:
self.ssh_thread.stop()
self.ssh.close()
del self.ssh
self.ssh = None
self.connected = False
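# Illustrative usage sketch, not part of the original module: how the DashdSSH class defined above is
# typically driven by a caller. The host, username and port numbers below are placeholders.
#
# ssh = DashdSSH('node.example.com', 22, 'ubuntu', auth_method='password')
# if ssh.connect():
#     ssh.open_tunnel(local_port=12345, remote_ip='127.0.0.1', remote_port=9998)
#     # ... issue dashd RPC calls through 127.0.0.1:12345 ...
#     ssh.disconnect()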
class DashdIndexException(JSONRPCException):
"""
Exception for notifying, that dash daemon should have indexing option tuned on
"""
def __init__(self, parent_exception):
JSONRPCException.__init__(self, parent_exception.error)
self.message = self.message + \
'\n\nMake sure the dash daemon you are connecting to has the following options enabled in ' \
'its dash.conf:\n\n' + \
'addressindex=1\n' + \
'spentindex=1\n' + \
'timestampindex=1\n' + \
'txindex=1\n\n' + \
'Changing these parameters requires to execute dashd with "-reindex" option (linux: ./dashd -reindex)'
def control_rpc_call(_func=None, *, encrypt_rpc_arguments=False, allow_switching_conns=True):
"""
Decorator dedicated to functions related to RPC calls, taking care of switching an active connection if the
current one becomes faulty. It also performs argument encryption for configured RPC calls.
"""
def control_rpc_call_inner(func):
@functools.wraps(func)
def catch_timeout_wrapper(*args, **kwargs):
ret = None
last_exception = None
self = args[0]
self.mark_call_begin()
try:
self.http_lock.acquire()
last_conn_reset_time = None
for try_nr in range(1, 5):
try:
try:
if encrypt_rpc_arguments:
if self.cur_conn_def:
pubkey = self.cur_conn_def.get_rpc_encryption_pubkey_object()
else:
pubkey = None
if pubkey:
args_str = json.dumps(args[1:])
max_chunk_size = int(pubkey.key_size / 8) - 75
encrypted_parts = []
while args_str:
data_chunk = args_str[:max_chunk_size]
args_str = args_str[max_chunk_size:]
ciphertext = pubkey.encrypt(data_chunk.encode('ascii'),
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None))
encrypted_parts.append(ciphertext.hex())
args = (args[0], 'DMTENCRYPTEDV1') + tuple(encrypted_parts)
log.info(
'Arguments of the "%s" call have been encrypted with the RSA public key of '
'the RPC node.', func.__name__)
ret = func(*args, **kwargs)
last_exception = None
self.mark_cur_conn_cfg_is_ok()
break
except (ConnectionResetError, ConnectionAbortedError, httplib.CannotSendRequest,
BrokenPipeError) as e:
# these exceptions usually occur when the established connection gets disconnected after
# some time of inactivity; try to reconnect within the same connection configuration
log.warning('Error while calling of "' + str(func) + ' (1)". Details: ' + str(e))
if last_conn_reset_time:
raise DashdConnectionError(e) # switch to another config if possible
else:
last_exception = e
last_conn_reset_time = time.time()
self.reset_connection() # retry with the same connection
except (socket.gaierror, ConnectionRefusedError, TimeoutError, socket.timeout,
NoValidConnectionsError) as e:
# exceptions raised most likely by a non-functioning dashd node; try to switch to another node
# if there is any in the config
log.warning('Error while calling of "' + str(func) + ' (3)". Details: ' + str(e))
raise DashdConnectionError(e)
except JSONRPCException as e:
log.error('Error while calling of "' + str(func) + ' (2)". Details: ' + str(e))
err_message = e.error.get('message','').lower()
self.http_conn.close()
if e.code == -5 and e.message == 'No information available for address':
raise DashdIndexException(e)
elif err_message.find('502 bad gateway') >= 0 or err_message.find('unknown error') >= 0:
raise DashdConnectionError(e)
else:
raise
except DashdConnectionError as e:
# try another net config if possible
log.error('Error while calling of "' + str(func) + '" (4). Details: ' + str(e))
if not allow_switching_conns or not self.switch_to_next_config():
self.last_error_message = str(e.org_exception)
raise e.org_exception # couldn't use another conn config, raise last exception
else:
try_nr -= 1 # retries with another config do not count
last_exception = e.org_exception
except Exception:
raise
finally:
self.http_lock.release()
if last_exception:
raise last_exception
return ret
return catch_timeout_wrapper
if _func is None:
return control_rpc_call_inner
else:
return control_rpc_call_inner(_func)
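# Illustrative sketch, not part of the original module: the control_rpc_call decorator above is applied to
# the RPC-calling methods of DashdInterface defined later in this file, for example:
#
# @control_rpc_call
# def getblockcount(self):
#     if self.open():
#         return self.proxy.getblockcount()
#
# so that a transient connection error raised inside the wrapped method triggers a connection reset and retry,
# and a persistent failure triggers a switch to the next configured connection (if switching is allowed).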
class MasternodeProtx:
def __init__(self):
self.marker = False
self.modified = False
self.db_id: Optional[int] = None
self.protx_hash: str = ''
self.collateral_hash: str = ''
self.collateral_index: int = -1
self.collateral_address: str = ''
self.operator_reward: float = 0.0
self.service: str = ''
self.registered_height: int = -1
self.last_paid_height: int = -1
self.pose_penalty: int = 0
self.pose_revived_height: int = -1
self.pose_ban_height: int = -1
self.owner_address: str = ''
self.voting_address: str = ''
self.payout_address: str = ''
self.pubkey_operator: str = ''
self.operator_payout_address: str = ''
def clear(self):
self.db_id = None
self.protx_hash = ''
self.collateral_hash = ''
self.collateral_index = -1
self.collateral_address = ''
self.operator_reward = 0.0
self.service = ''
self.registered_height = -1
self.last_paid_height = -1
self.pose_penalty = 0
self.pose_revived_height = -1
self.pose_ban_height = -1
self.owner_address = ''
self.voting_address = ''
self.payout_address = ''
self.pubkey_operator = ''
self.operator_payout_address = ''
def copy_from(self, src: MasternodeProtx):
self.protx_hash = src.protx_hash
self.collateral_hash = src.collateral_hash
self.collateral_index = src.collateral_index
self.collateral_address = src.collateral_address
self.operator_reward = src.operator_reward
self.service = src.service
self.registered_height = src.registered_height
self.last_paid_height = src.last_paid_height
self.pose_penalty = src.pose_penalty
self.pose_revived_height = src.pose_revived_height
self.pose_ban_height = src.pose_ban_height
self.owner_address = src.owner_address
self.voting_address = src.voting_address
self.payout_address = src.payout_address
self.pubkey_operator = src.pubkey_operator
self.operator_payout_address = src.operator_payout_address
def copy_from_json(self, protx: Dict):
self.protx_hash = protx.get('proTxHash')
self.collateral_hash = protx.get('collateralHash')
self.collateral_index = protx.get('collateralIndex', 0)
self.collateral_address = protx.get('collateralAddress')
self.operator_reward = protx.get('operatorReward')
s = protx.get('state')
if s and isinstance(s, dict):
self.service = s.get('service')
self.registered_height = s.get('registeredHeight')
self.last_paid_height = s.get('lastPaidHeight')
self.pose_penalty = s.get('PoSePenalty')
self.pose_revived_height = s.get('PoSeRevivedHeight')
self.pose_ban_height = s.get('PoSeBanHeight')
self.owner_address = s.get('ownerAddress')
self.voting_address = s.get('votingAddress')
self.payout_address = s.get('payoutAddress')
self.pubkey_operator = s.get('pubKeyOperator')
self.operator_payout_address = s.get('operatorPayoutAddress')
def __setattr__(self, name, value):
if hasattr(self, name) and name not in ('modified', 'marker', 'db_id', '_AttrsProtected__allow_attr_definition'):
if isinstance(value, decimal.Decimal):
value = float(value)
if getattr(self, name) != value:
self.modified = True
super().__setattr__(name, value)
def update_in_db(self, cursor):
try:
if self.db_id is None:
cursor.execute(
"INSERT INTO protx(protx_hash, collateral_hash, collateral_index, collateral_address,"
"operator_reward, service, registered_height, last_paid_height, pose_penalty, "
"pose_revived_height, pose_ban_height, owner_address, voting_address, payout_address,"
"pubkey_operator, operator_payout_address) "
"VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
(self.protx_hash, self.collateral_hash, self.collateral_index,
self.collateral_address, self.operator_reward, self.service,
self.registered_height, self.last_paid_height, self.pose_penalty,
self.pose_revived_height, self.pose_ban_height, self.owner_address,
self.voting_address, self.payout_address, self.pubkey_operator,
self.operator_payout_address))
self.db_id = cursor.lastrowid
else:
cursor.execute(
"update protx set protx_hash=?, collateral_hash=?, collateral_index=?, collateral_address=?,"
"operator_reward=?, service=?, registered_height=?, last_paid_height=?, pose_penalty=?, "
"pose_revived_height=?, pose_ban_height=?, owner_address=?, voting_address=?, payout_address=?,"
"pubkey_operator=?, operator_payout_address=? where id=?",
(self.protx_hash, self.collateral_hash, self.collateral_index,
self.collateral_address, self.operator_reward, self.service,
self.registered_height, self.last_paid_height, self.pose_penalty,
self.pose_revived_height, self.pose_ban_height, self.owner_address,
self.voting_address, self.payout_address, self.pubkey_operator,
self.operator_payout_address, self.db_id))
except Exception as e:
log.exception(str(e))
def delete_from_db(self, cursor):
if self.db_id is not None:
cursor.execute("delete from protx where id=?", (self.db_id,))
class Masternode(AttrsProtected):
def __init__(self):
AttrsProtected.__init__(self)
self.ident: Optional[str] = None
self.status: Optional[str] = None
self.payee: Optional[str] = None
self.lastpaidtime = None
self.lastpaidblock = None
self.ip_port = None
self.protx_hash: Optional[str] = None
self.db_id = None
self.marker = None
self.modified = False
self.monitor_changes = False
self.queue_position = None
self.protx = MasternodeProtx()
self.set_attr_protection()
def copy_from(self, src: Masternode):
if self.ident != src.ident or self.status != src.ident or self.payee != src.payee or \
self.lastpaidtime != src.lastpaidtime or self.lastpaidblock != src.lastpaidblock or \
self.ip_port != src.ip_port or self.protx_hash != src.protx_hash or self.queue_position != src.queue_position:
self.ident = src.ident
self.status = src.status
self.payee = src.payee
self.lastpaidtime = src.lastpaidtime
self.lastpaidblock = src.lastpaidblock
self.ip_port = src.ip_port
self.protx_hash = src.protx_hash
self.queue_position = src.queue_position
self.modified = True
def copy_from_json(self, mn_ident: str, mn_json: Dict):
if self.ident != mn_ident:
self.ident = mn_ident
self.modified = True
if self.status != mn_json.get('status'):
self.status = mn_json.get('status')
self.modified = True
if self.payee != mn_json.get('payee'):
self.payee = mn_json.get('payee')
self.modified = True
if self.lastpaidtime != mn_json.get('lastpaidtime', 0):
self.lastpaidtime = mn_json.get('lastpaidtime', 0)
self.modified = True
if self.lastpaidblock != mn_json.get('lastpaidblock', 0):
self.lastpaidblock = mn_json.get('lastpaidblock', 0)
self.modified = True
if self.ip_port != mn_json.get('address'):
self.ip_port = mn_json.get('address')
self.modified = True
if self.protx_hash != mn_json.get('proTxHash'):
self.protx_hash = mn_json.get('proTxHash')
self.modified = True
@property
def registered_height(self):
if self.protx:
return self.protx.registered_height
def __setattr__(self, name, value):
if hasattr(self, name) and name not in ('modified', 'marker', 'monitor_changes', '_AttrsProtected__allow_attr_definition'):
if self.monitor_changes and getattr(self, name) != value:
self.modified = True
super().__setattr__(name, value)
def json_cache_wrapper(func, intf, cache_file_ident, skip_cache=False,
accept_cache_data_fun: Optional[Callable[[Dict], bool]]=None):
"""
Wrapper for saving/restoring rpc-call results inside cache files.
:param accept_cache_data_fun: reference to an external function verifying whether data read from cache
can be accepted; if not, a normal call to an rpc node will be executed
"""
def json_call_wrapper(*args, **kwargs):
nonlocal skip_cache, cache_file_ident, intf, func
fname = '/insight_dash_'
if intf.app_config.is_testnet:
fname += 'testnet_'
cache_file = intf.app_config.tx_cache_dir + fname + cache_file_ident + '.json'
if not skip_cache:
try: # looking into cache first
with open(cache_file) as fp:
j = json.load(fp, parse_float=decimal.Decimal)
if accept_cache_data_fun is None or accept_cache_data_fun(j):
return j
except:
pass
# if not found in cache, call the original function
j = func(*args, **kwargs)
try:
with open(cache_file, 'w') as fp:
json.dump(j, fp, default=EncodeDecimal)
except Exception as e:
log.exception('Cannot save data to a cache file')
pass
return j
return json_call_wrapper
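# Illustrative sketch, not part of the original module: json_cache_wrapper is meant to wrap a callable
# returning JSON-serializable RPC data so that the result is cached under tx_cache_dir. The names below
# (intf, txid) are hypothetical.
#
# load_tx = json_cache_wrapper(lambda: intf.proxy.getrawtransaction(txid, 1), intf,
#                              cache_file_ident='tx-' + txid)
# tx_json = load_tx()  # read from the cache file if present, otherwise call the node and cache the result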
class DashdInterface(WndUtils):
def __init__(self, window,
on_connection_initiated_callback=None,
on_connection_failed_callback=None,
on_connection_successful_callback=None,
on_connection_disconnected_callback=None):
WndUtils.__init__(self, app_config=None)
self.initialized = False
self.app_config = None
self.db_intf = None
self.connections = []
self.cur_conn_index = 0
self.cur_conn_def: Optional['DashNetworkConnectionCfg'] = None
self.block_timestamps: Dict[int, int] = {}
# below is the connection with which a particular RPC call has started; if the connection is switched because of
# problems with some nodes, switching stops once we complete the round and return to the starting connection
self.starting_conn = None
self.masternodes: List[Masternode] = []
self.masternodes_by_ident: Dict[str, Masternode] = {}
self.masternodes_by_ip_port: Dict[str, Masternode] = {}
self.protx_by_hash: Dict[str, MasternodeProtx] = {}
self.ssh = None
self.window = window
self.active = False
self.rpc_url = None
self.proxy = None
self.http_conn = None # HTTPConnection object passed to the AuthServiceProxy (for convenient connection reset)
self.on_connection_initiated_callback = on_connection_initiated_callback
self.on_connection_failed_callback = on_connection_failed_callback
self.on_connection_successful_callback = on_connection_successful_callback
self.on_connection_disconnected_callback = on_connection_disconnected_callback
self.last_error_message = None
self.mempool_txes: Dict[str, Dict] = {}
self.http_lock = threading.RLock()
def initialize(self, config: AppConfig, connection=None, for_testing_connections_only=False):
self.app_config = config
self.db_intf = self.app_config.db_intf
# conn configurations are used from the first item in the list; if one fails, then next is taken
if connection:
# this parameter is used for testing specific connection
self.connections = [connection]
else:
# get connection list ordered by priority of use
self.connections = self.app_config.get_ordered_conn_list()
self.cur_conn_index = 0
if self.connections:
self.cur_conn_def = self.connections[self.cur_conn_index]
else:
self.cur_conn_def = None
if not for_testing_connections_only:
self.load_data_from_db_cache()
self.initialized = True
def load_data_from_db_cache(self):
self.masternodes.clear()
self.protx_by_hash.clear()
self.masternodes_by_ident.clear()
self.masternodes_by_ip_port.clear()
self.block_timestamps.clear()
cur = self.db_intf.get_cursor()
cur2 = self.db_intf.get_cursor()
db_modified = False
try:
tm_start = time.time()
db_correction_duration = 0.0
log.debug("Reading masternode data from DB")
cur.execute("SELECT id, ident, status, payee, last_paid_time, last_paid_block, IP, queue_position, "
"protx_hash from MASTERNODES where dmt_active=1")
for row in cur.fetchall():
db_id = row[0]
ident = row[1]
# correct duplicated masternodes issue
mn_first = self.masternodes_by_ident.get(ident)
if mn_first is not None:
continue
# delete duplicated (caused by breaking the app while loading)
tm_start_1 = time.time()
cur2.execute('DELETE from MASTERNODES where ident=? and id<>?', (ident, db_id))
if cur2.rowcount > 0:
db_modified = True
db_correction_duration += (time.time() - tm_start_1)
mn = Masternode()
mn.db_id = db_id
mn.ident = ident
mn.status = row[2]
mn.payee = row[3]
mn.lastpaidtime = row[4]
mn.lastpaidblock = row[5]
mn.ip_port = row[6]
mn.queue_position = row[7]
mn.protx_hash = row[8]
self.masternodes.append(mn)
self.masternodes_by_ident[mn.ident] = mn
self.masternodes_by_ip_port[mn.ip_port] = mn
tm_diff = time.time() - tm_start
log.info('DB read time of %d MASTERNODES: %s s, db fix time: %s' %
(len(self.masternodes), str(tm_diff), str(db_correction_duration)))
log.debug("Reading protx data from DB")
cur.execute("SELECT id, protx_hash, collateral_hash, collateral_index, collateral_address,"
"operator_reward, service, registered_height, last_paid_height, pose_penalty,"
"pose_revived_height, pose_ban_height, owner_address, voting_address, payout_address,"
"pubkey_operator, operator_payout_address from protx")
for row in cur.fetchall():
protx = MasternodeProtx()
protx.db_id = row[0]
protx.protx_hash = row[1]
protx.collateral_hash = row[2]
protx.collateral_index = row[3]
protx.collateral_address = row[4]
protx.operator_reward = row[5]
protx.service = row[6]
protx.registered_height = row[7]
protx.last_paid_height = row[8]
protx.pose_penalty = row[9]
protx.pose_revived_height = row[10]
protx.pose_ban_height = row[11]
protx.owner_address = row[12]
protx.voting_address = row[13]
protx.payout_address = row[14]
protx.pubkey_operator = row[15]
protx.operator_payout_address = row[16]
protx.modified = False
self.protx_by_hash[protx.protx_hash] = protx
# assign protx objects to masternodes
for mn in self.masternodes:
protx = self.protx_by_hash.get(mn.protx_hash)
if protx and mn.protx != protx:
mn.protx = protx
log.debug("Finished reading protx data from DB")
except Exception as e:
log.exception('SQLite initialization error')
finally:
if db_modified:
self.db_intf.commit()
self.db_intf.release_cursor()
self.db_intf.release_cursor()
def reload_configuration(self):
"""Called after modification of connections' configuration or changes having impact on the file name
associated to database cache."""
# get connection list ordered by priority of use
self.disconnect()
self.connections = self.app_config.get_ordered_conn_list()
self.cur_conn_index = 0
if len(self.connections):
self.cur_conn_def = self.connections[self.cur_conn_index]
self.load_data_from_db_cache()
else:
self.cur_conn_def = None
def disconnect(self):
if self.active:
log.debug('Disconnecting')
if self.ssh:
self.ssh.disconnect()
del self.ssh
self.ssh = None
self.active = False
if self.on_connection_disconnected_callback:
self.on_connection_disconnected_callback()
def mark_call_begin(self):
self.starting_conn = self.cur_conn_def
def switch_to_next_config(self):
"""
If there is another dashd config not used recently, switch to it. Called only when there was a problem
with the current connection config.
:return: True if successfully switched or False if there is no other config
"""
if self.cur_conn_def:
self.app_config.conn_cfg_failure(self.cur_conn_def) # mark connection as defective
if self.cur_conn_index < len(self.connections)-1:
idx = self.cur_conn_index + 1
else:
idx = 0
conn = self.connections[idx]
if conn != self.starting_conn and conn != self.cur_conn_def:
log.debug("Trying to switch to another connection: %s" % conn.get_description())
self.disconnect()
self.cur_conn_index = idx
self.cur_conn_def = conn
if not self.open():
return self.switch_to_next_config()
else:
return True
else:
log.warning('Failed to connect: no other connection configurations.')
return False
def mark_cur_conn_cfg_is_ok(self):
if self.cur_conn_def:
self.app_config.conn_cfg_success(self.cur_conn_def)
def open(self):
"""
Opens connection to dash RPC. If it fails, then the next enabled conn config will be used, if any exists.
:return: True if successfully connected, False if user cancelled the operation. If all of the attempts
fail, then an appropriate exception will be raised.
"""
try:
if not self.cur_conn_def:
raise Exception('There is no connections to Dash network enabled in the configuration.')
while True:
try:
if self.open_internal():
break
else:
if not self.switch_to_next_config():
return False
except CancelException:
return False
except (socket.gaierror, ConnectionRefusedError, TimeoutError, socket.timeout,
NoValidConnectionsError) as e:
# exceptions raised most likely by a non-functioning dashd node; try to switch to another node
# if there is any in the config
if not self.switch_to_next_config():
raise e # couldn't use another conn config, raise exception
else:
break
except Exception as e:
self.last_error_message = str(e)
raise
return True
def reset_connection(self):
"""
Called when communication errors are detected while sending RPC commands. Here we are closing the SSH-tunnel
(if used) and HTTP connection object to prepare for another try.
:return:
"""
if self.active:
if self.http_conn:
self.http_conn.close()
if self.ssh:
self.ssh.disconnect()
self.active = False
def open_internal(self):
"""
Try to establish connection to dash RPC daemon for current connection config.
:return: True if the connection is successfully established, False if the user cancels the operation
(cancelling is only possible when the user is prompted for a password).
"""
if not self.active:
log.info("Connecting to: %s" % self.cur_conn_def.get_description())
try:
# make the owner know, we are connecting
if self.on_connection_initiated_callback:
self.on_connection_initiated_callback()
except:
pass
if self.cur_conn_def.use_ssh_tunnel:
# RPC over SSH
if self.ssh is None:
self.ssh = DashdSSH(self.cur_conn_def.ssh_conn_cfg.host, self.cur_conn_def.ssh_conn_cfg.port,
self.cur_conn_def.ssh_conn_cfg.username,
auth_method=self.cur_conn_def.ssh_conn_cfg.auth_method,
private_key_path=self.cur_conn_def.ssh_conn_cfg.private_key_path)
try:
log.debug('starting ssh.connect')
self.ssh.connect()
log.debug('finished ssh.connect')
except Exception as e:
log.error('error in ssh.connect')
try:
# make the owner know, connection attempt failed
if self.on_connection_failed_callback:
self.on_connection_failed_callback()
except:
log.exception('on_connection_try_fail_callback call exception')
raise
# configure SSH tunnel
# get random local unprivileged port number to establish SSH tunnel
success = False
local_port = None
for try_nr in range(1, 10):
try:
log.debug(f'beginning ssh.open_tunnel, try: {try_nr}')
local_port = randint(2000, 50000)
self.ssh.open_tunnel(local_port,
self.cur_conn_def.host,
int(self.cur_conn_def.port))
success = True
break
except Exception as e:
log.exception('error in ssh.open_tunnel loop: ' + str(e))
log.debug('finished ssh.open_tunnel loop')
if not success:
log.error('finished ssh.open_tunnel loop with error')
return False
else:
rpc_user = self.cur_conn_def.username
rpc_password = self.cur_conn_def.password
rpc_host = '127.0.0.1' # SSH tunnel on loopback
rpc_port = local_port
else:
# direct RPC
rpc_host = self.cur_conn_def.host
rpc_port = self.cur_conn_def.port
rpc_user = self.cur_conn_def.username
rpc_password = self.cur_conn_def.password
if self.cur_conn_def.use_ssl:
self.rpc_url = 'https://'
self.http_conn = httplib.HTTPSConnection(rpc_host, rpc_port, timeout=5, context=ssl._create_unverified_context())
else:
self.rpc_url = 'http://'
self.http_conn = httplib.HTTPConnection(rpc_host, rpc_port, timeout=5)
self.rpc_url += rpc_user + ':' + rpc_password + '@' + rpc_host + ':' + str(rpc_port)
log.debug('AuthServiceProxy configured to: %s' % self.rpc_url)
self.proxy = AuthServiceProxy(self.rpc_url, timeout=1000, connection=self.http_conn)
try:
# check the connection
self.http_conn.connect()
log.debug('Successfully connected AuthServiceProxy')
try:
# make the owner know, we successfully finished connection
if self.on_connection_successful_callback:
self.on_connection_successful_callback()
except:
log.exception('on_connection_finished_callback call exception')
except:
log.exception('Connection failed')
try:
# make the owner know, connection attempt failed
if self.on_connection_failed_callback:
self.on_connection_failed_callback()
if self.ssh:
# if there is a ssh connection established earlier, disconnect it because apparently it isn't
# functioning
self.ssh.disconnect()
except:
log.exception('on_connection_try_fail_callback call exception')
raise
finally:
log.debug('http_conn.close()')
self.http_conn.close()
# timeout has initially been set to 5 seconds to perform a 'quick' connection test
self.http_conn.timeout = 20
self.active = True
return self.active
def get_active_conn_description(self):
if self.cur_conn_def:
return self.cur_conn_def.get_description()
else:
return '???'
@control_rpc_call
def getblockcount(self):
if self.open():
return self.proxy.getblockcount()
else:
raise Exception('Not connected')
@control_rpc_call
def getblockchaininfo(self, verify_node: bool = True):
if self.open():
info = self.proxy.getblockchaininfo()
if verify_node:
node_under_testnet = (info.get('chain') == 'test')
if self.app_config.is_testnet and not node_under_testnet:
raise Exception('This RPC node works under Dash MAINNET, but your current configuration is '
'for TESTNET.')
elif self.app_config.is_mainnet and node_under_testnet:
raise Exception('This RPC node works under Dash TESTNET, but your current configuration is '
'for MAINNET.')
return info
else:
raise Exception('Not connected')
@control_rpc_call
def getnetworkinfo(self):
if self.open():
info = self.proxy.getnetworkinfo()
return info
else:
raise Exception('Not connected')
@control_rpc_call
def issynchronized(self):
if self.open():
try:
syn = self.proxy.mnsync('status')
return syn.get('IsSynced')
except JSONRPCException as e:
if str(e).lower().find('403 forbidden') >= 0:
self.http_conn.close()
return True
else:
raise
else:
raise Exception('Not connected')
@control_rpc_call
def mnsync(self):
if self.open():
# if connecting to HTTP(S) proxy do not call this function - it will not be exposed
if self.cur_conn_def.is_http_proxy():
return {}
else:
return self.proxy.mnsync('status')
else:
raise Exception('Not connected')
@control_rpc_call
def masternodebroadcast(self, what, hexto):
if self.open():
return self.proxy.masternodebroadcast(what, hexto)
else:
raise Exception('Not connected')
def reset_masternode_data_cache(self):
cache_item_name = 'ProtxLastReadTime_' + self.app_config.dash_network
app_cache.set_value(cache_item_name, 0)
cache_item_name = f'MasternodesLastReadTime_{self.app_config.dash_network}'
app_cache.set_value(cache_item_name, 0)
def _read_protx_list(self, data_max_age: int = PROTX_CACHE_VALID_SECONDS, feedback_fun: Optional[Callable] = None):
cache_item_name = 'ProtxLastReadTime_' + self.app_config.dash_network
last_read_time = app_cache.get_value(cache_item_name, 0, int)
if not self.protx_by_hash or data_max_age == 0 or (int(time.time()) - last_read_time) >= data_max_age:
log.info('Fetching protx data from network')
for protx_hash in self.protx_by_hash.keys():
protx = self.protx_by_hash[protx_hash]
protx.marker = False
protx.modified = False
# read protx list from the network:
protx_list = self.proxy.protx('list', 'registered', True)
app_cache.set_value(cache_item_name, int(time.time()))
# update local cache in RAM
for protx_json in protx_list:
if feedback_fun:
feedback_fun()
protx_hash = protx_json.get('proTxHash')
if protx_hash:
protx = self.protx_by_hash.get(protx_hash)
if not protx:
protx = MasternodeProtx()
self.protx_by_hash[protx_hash] = protx
protx.copy_from_json(protx_json)
protx.marker = True
# update db cache:
db_modified = False
cur = None
try:
if self.db_intf.db_active:
cur = self.db_intf.get_cursor()
for protx_hash in self.protx_by_hash.keys():
protx = self.protx_by_hash[protx_hash]
if protx.db_id is None or protx.modified:
protx.update_in_db(cur)
db_modified = True
# remove non existing protx entries
protx_to_remove = []
for protx_hash in self.protx_by_hash.keys():
protx = self.protx_by_hash[protx_hash]
if not protx.marker:
protx_to_remove.append(protx)
for protx in protx_to_remove:
protx.delete_from_db(cur)
del self.protx_by_hash[protx.protx_hash]
finally:
if db_modified:
self.db_intf.commit()
if cur is not None:
self.db_intf.release_cursor()
log.info('Finished fetching protx data from network')
return self.protx_by_hash
def _update_mn_queue_values(self, masternodes: List[Masternode]):
"""
Updates masternode payment queue order values.
"""
payment_queue = []
for mn in masternodes:
if mn.status == 'ENABLED':
protx = mn.protx
if mn.lastpaidblock > 0:
mn.queue_position = mn.lastpaidblock
else:
if protx:
mn.queue_position = protx.registered_height
else:
mn.queue_position = None
if protx:
pose_revived_height = protx.pose_revived_height
if pose_revived_height > 0 and pose_revived_height > mn.lastpaidblock:
mn.queue_position = pose_revived_height
payment_queue.append(mn)
else:
mn.queue_position = None
payment_queue.sort(key=lambda x: x.queue_position, reverse=False)
for mn in masternodes:
if mn.status == 'ENABLED':
mn.queue_position = payment_queue.index(mn)
@control_rpc_call
def get_masternodelist(self, *args, data_max_age=MASTERNODES_CACHE_VALID_SECONDS,
protx_data_max_age=PROTX_CACHE_VALID_SECONDS,
feedback_fun: Optional[Callable] = None) -> List[Masternode]:
"""
Returns masternode list, read from the Dash network or from the internal cache.
:param args: arguments passed to the 'masternodelist' RPC call
        :param data_max_age: maximum age (in seconds) of the cached masternode data to be used; if the
          cache is older than 'data_max_age', an RPC call is performed to load newer masternode data;
          a value of 0 forces reading new data from the network
        :param protx_data_max_age: maximum age (in seconds) of the cached protx data, analogous to 'data_max_age'
        :param feedback_fun: optional callback invoked periodically during processing (e.g. to keep a UI responsive)
        :return: list of Masternode objects, matching the 'args' arguments
"""
if self.open():
if len(args) == 1 and args[0] == 'json':
last_read_time = app_cache.get_value(f'MasternodesLastReadTime_{self.app_config.dash_network}', 0, int)
if self.masternodes and data_max_age > 0 and int(time.time()) - last_read_time < data_max_age:
return self.masternodes
else:
self._read_protx_list(protx_data_max_age, feedback_fun=feedback_fun)
for mn in self.masternodes:
mn.marker = False # mark to delete masternode existing in cache but no longer
# existing on the network
mn.modified = False
mns_json = self.proxy.masternodelist(*args)
app_cache.set_value(f'MasternodesLastReadTime_{self.app_config.dash_network}', int(time.time()))
for mn_id in mns_json.keys():
if feedback_fun:
feedback_fun()
mn_json = mns_json.get(mn_id)
mn = self.masternodes_by_ident.get(mn_id)
if not mn:
mn = Masternode()
mn.copy_from_json(mn_id, mn_json)
self.masternodes.append(mn)
self.masternodes_by_ident[mn_id] = mn
self.masternodes_by_ip_port[mn.ip_port] = mn
else:
mn.copy_from_json(mn_id, mn_json)
mn.marker = True
protx = self.protx_by_hash.get(mn.protx_hash)
if protx and mn.protx != protx:
mn.protx = protx
self._update_mn_queue_values(self.masternodes)
# save masternodes to the db cache
db_modified = False
cur = None
try:
if self.db_intf.db_active:
cur = self.db_intf.get_cursor()
for mn in self.masternodes:
if feedback_fun:
feedback_fun()
if mn.db_id is None:
# Masternode entry not in db cache yet
if self.db_intf.db_active:
cur.execute(
"INSERT INTO MASTERNODES(ident, status, payee, "
" last_paid_time, last_paid_block, ip, protx_hash, "
" registered_height, dmt_active, dmt_create_time, queue_position) "
"VALUES (?,?,?,?,?,?,?,?,?,?,?)",
(mn.ident, mn.status, mn.payee,
mn.lastpaidtime, mn.lastpaidblock, mn.ip_port, mn.protx_hash,
mn.registered_height, 1, datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
mn.queue_position))
mn.db_id = cur.lastrowid
db_modified = True
else:
if mn.modified:
cur.execute(
"UPDATE MASTERNODES set ident=?, status=?, payee=?, "
"last_paid_time=?, last_paid_block=?, ip=?, protx_hash=?, "
"registered_height=?, queue_position=? WHERE id=?",
(mn.ident, mn.status, mn.payee,
mn.lastpaidtime, mn.lastpaidblock, mn.ip_port, mn.protx_hash, mn.registered_height,
mn.queue_position, mn.db_id))
db_modified = True
# remove non existing masternodes from cache
for mn_index in reversed(range(len(self.masternodes))):
if feedback_fun:
feedback_fun()
mn = self.masternodes[mn_index]
if not mn.marker:
if self.db_intf.db_active:
cur.execute("UPDATE MASTERNODES set dmt_active=0, dmt_deactivation_time=?"
"WHERE ID=?",
(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), mn.db_id))
db_modified = True
                        self.masternodes_by_ident.pop(mn.ident, 0)
del self.masternodes[mn_index]
finally:
if db_modified:
self.db_intf.commit()
if cur is not None:
self.db_intf.release_cursor()
return self.masternodes
else:
mns = self.proxy.masternodelist(*args)
return mns
else:
raise Exception('Not connected')
@control_rpc_call
def getaddressbalance(self, addresses):
if self.open():
return self.proxy.getaddressbalance({'addresses': addresses})
else:
raise Exception('Not connected')
@control_rpc_call
def getaddressutxos(self, addresses):
if self.open():
return self.proxy.getaddressutxos({'addresses': addresses})
else:
raise Exception('Not connected')
@control_rpc_call
def getaddressmempool(self, addresses):
if self.open():
return self.proxy.getaddressmempool({'addresses': addresses})
else:
raise Exception('Not connected')
@control_rpc_call
def getrawmempool(self):
if self.open():
return self.proxy.getrawmempool()
else:
raise Exception('Not connected')
@control_rpc_call
def getrawtransaction(self, txid, verbose, skip_cache=False):
def check_if_tx_confirmed(tx_json):
# cached transaction will not be accepted if the transaction stored in cache file was not confirmed
if tx_json.get('confirmations'):
return True
return False
if self.open():
tx_json = json_cache_wrapper(self.proxy.getrawtransaction, self, 'tx-' + str(verbose) + '-' + txid,
skip_cache=skip_cache, accept_cache_data_fun=check_if_tx_confirmed)\
(txid, verbose)
return tx_json
else:
raise Exception('Not connected')
@control_rpc_call
def getblockhash(self, blockid, skip_cache=False):
if self.open():
return json_cache_wrapper(self.proxy.getblockhash, self, 'blockhash-' + str(blockid),
skip_cache=skip_cache)(blockid)
else:
raise Exception('Not connected')
@control_rpc_call
def getblockheader(self, blockhash, skip_cache=False):
if self.open():
return json_cache_wrapper(self.proxy.getblockheader, self, 'blockheader-' + str(blockhash),
skip_cache=skip_cache)(blockhash)
else:
raise Exception('Not connected')
@control_rpc_call
def validateaddress(self, address):
if self.open():
return self.proxy.validateaddress(address)
else:
raise Exception('Not connected')
@control_rpc_call
def decoderawtransaction(self, rawtx):
if self.open():
return self.proxy.decoderawtransaction(rawtx)
else:
raise Exception('Not connected')
@control_rpc_call
def sendrawtransaction(self, tx, use_instant_send):
if self.open():
return self.proxy.sendrawtransaction(tx, False, use_instant_send)
else:
raise Exception('Not connected')
@control_rpc_call
def getcurrentvotes(self, hash):
if self.open():
return self.proxy.getcurrentvotes(hash)
else:
raise Exception('Not connected')
@control_rpc_call
def gobject(self, *args):
if self.open():
return self.proxy.gobject(*args)
else:
raise Exception('Not connected')
@control_rpc_call
def masternode(self, *args):
if self.open():
return self.proxy.masternode(*args)
else:
raise Exception('Not connected')
@control_rpc_call
def getgovernanceinfo(self):
if self.open():
return self.proxy.getgovernanceinfo()
else:
raise Exception('Not connected')
@control_rpc_call
def getsuperblockbudget(self, block_index):
if self.open():
return self.proxy.getsuperblockbudget(block_index)
else:
raise Exception('Not connected')
@control_rpc_call
def voteraw(self, masternode_tx_hash, masternode_tx_index, governance_hash, vote_signal, vote, sig_time, vote_sig):
if self.open():
return self.proxy.voteraw(masternode_tx_hash, masternode_tx_index, governance_hash, vote_signal, vote,
sig_time, vote_sig)
else:
raise Exception('Not connected')
@control_rpc_call
def getaddressdeltas(self, *args):
if self.open():
return self.proxy.getaddressdeltas(*args)
else:
raise Exception('Not connected')
@control_rpc_call
def getaddresstxids(self, *args):
if self.open():
return self.proxy.getaddresstxids(*args)
else:
raise Exception('Not connected')
def protx(self, *args):
if self.open():
return self.proxy.protx(*args)
else:
raise Exception('Not connected')
@control_rpc_call
def spork(self, *args):
if self.open():
return self.proxy.spork(*args)
else:
raise Exception('Not connected')
def rpc_call(self, encrypt_rpc_arguments: bool, allow_switching_conns: bool, command: str, *args):
def call_command(self, *args):
c = self.proxy.__getattr__(command)
return c(*args)
if self.open():
call_command.__setattr__('__name__', command)
fun = control_rpc_call(call_command, encrypt_rpc_arguments=encrypt_rpc_arguments,
allow_switching_conns=allow_switching_conns)
c = fun(self, *args)
return c
else:
raise Exception('Not connected')
@control_rpc_call
def listaddressbalances(self, minfee):
if self.open():
return self.proxy.listaddressbalances(minfee)
else:
raise Exception('Not connected')
@control_rpc_call
def checkfeaturesupport(self, feature_name: str, dmt_version: str, *args) -> Dict:
if self.open():
return self.proxy.checkfeaturesupport(feature_name, dmt_version)
else:
raise Exception('Not connected')
def get_block_timestamp(self, block: int):
ts = self.block_timestamps.get(block)
if ts is None:
bhash = self.getblockhash(block)
bh = self.getblockheader(bhash)
ts = bh['time']
self.block_timestamps[block] = ts
return ts
def fetch_mempool_txes(self, feedback_fun: Optional[Callable] = None):
cur_mempool_txes = self.proxy.getrawmempool()
txes_to_purge = []
for tx_hash in self.mempool_txes:
if tx_hash not in cur_mempool_txes:
txes_to_purge.append(tx_hash)
for tx_hash in txes_to_purge:
del self.mempool_txes[tx_hash]
for tx_hash in cur_mempool_txes:
if feedback_fun:
feedback_fun()
tx = self.mempool_txes.get(tx_hash)
if not tx:
tx = self.getrawtransaction(tx_hash, True, skip_cache=True)
self.mempool_txes[tx_hash] = tx
def is_protx_update_pending(self, protx_hash: str, ip_port: str = None) -> bool:
"""
        Check whether a ProTx transaction related to the given ProRegTx exists in the mempool.
        :param protx_hash: hash of the ProRegTx transaction to look for
        :param ip_port: optional "ip:port" string; if provided, a pending ProTx whose 'service' field
          matches it also counts as a hit
        :return: True if a matching pending ProTx transaction was found, False otherwise
"""
try:
for tx_hash in self.mempool_txes:
tx = self.mempool_txes[tx_hash]
protx = tx.get('proUpRegTx')
if not protx:
protx = tx.get('proUpRevTx')
if not protx:
protx = tx.get('proUpServTx')
if not protx:
protx = tx.get('proRegTx')
                if protx and ((protx.get('proTxHash') == protx_hash) or
                              (ip_port and protx.get('service') == ip_port)):
return True
return False
except Exception as e:
return False
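# --- Illustrative sketch (not part of the original module) ---
# Typical read path for the cached masternode list implemented above.  The
# variable `dashd` stands for an instance of the RPC-interface class defined
# in this module (its construction is outside this excerpt) and is an
# assumption made only for illustration.
def _example_masternode_list_usage(dashd):
    # data_max_age=0 / protx_data_max_age=0 force a refresh of both caches
    mns = dashd.get_masternodelist('json', data_max_age=0, protx_data_max_age=0)
    enabled = [mn for mn in mns if mn.status == 'ENABLED']
    # queue_position is filled in by _update_mn_queue_values() for ENABLED nodes
    return sorted(enabled, key=lambda mn: mn.queue_position)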
| mit | 8,209,623,023,224,227,000 | 41.821782 | 131 | 0.529335 | false |
Ruide/angr-dev | angr/angr/analyses/identifier/functions/atoi.py | 5 | 2123 |
import random
import string
from ..func import Func, TestData
class atoi(Func):
def __init__(self):
super(atoi, self).__init__()
self.skips_whitespace = False
self.allows_negative = True
    def rand_str(self, length, byte_list=None):  # pylint: disable=no-self-use
if byte_list is None:
return "".join(chr(random.randint(0, 255)) for _ in xrange(length))
return "".join(random.choice(byte_list) for _ in xrange(length))
def num_args(self):
return 1
def get_name(self):
if self.allows_negative:
suffix = ""
else:
suffix = "_no_signs"
if self.skips_whitespace:
return "atoi_whitespace_skip" + suffix
return "atoi" + suffix
def gen_input_output_pair(self):
num = random.randint(-(2**26), 2**26-1)
if not self.allows_negative:
num = abs(num)
s = str(num)
test_input = [s]
test_output = [s]
return_val = num
max_steps = 20
return TestData(test_input, test_output, return_val, max_steps)
def pre_test(self, func, runner):
num = random.randint(-(2 ** 26), 2 ** 26 - 1)
num = abs(num)
s = str(num)
test_input = [s]
test_output = [s]
return_val = num
max_steps = 20
test = TestData(test_input, test_output, return_val, max_steps)
if not runner.test(func, test):
return False
s = str(num)
s = self.rand_str(10, string.whitespace) + s
test_input = [s]
test_output = [s]
return_val = num
max_steps = 20
test = TestData(test_input, test_output, return_val, max_steps)
self.skips_whitespace = runner.test(func, test)
num = -random.randint(2000, 8000)
s = str(num)
test_input = [s]
test_output = [s]
return_val = num
max_steps = 20
test = TestData(test_input, test_output, return_val, max_steps)
if not runner.test(func, test):
self.allows_negative = False
return True
| bsd-2-clause | 7,520,515,114,895,593,000 | 26.934211 | 79 | 0.545455 | false |
hockeybuggy/twitter-sentiment | src/conductor.py | 1 | 3409 | #!/usr/bin/env python
# File : conductor.py
# Author : Douglas Anderson
# Description: Simple driver for sentiment analysis implementation
import os, sys
import tokenize
import normalize
import labelselect
import statsify
import wordselection
import dictizer
import split_dataset
from Token import Token
from parse_args import parse_args
from train import maxent_classifier
from train import maxent_classifier_with_validation
from train import naive_bayes_classifier
if __name__ == "__main__":
args = parse_args()
print "Opening dataset..."
tokens = tokenize.open_tweets_file("../data/b.tsv", 0, args.items)
print "Selecting labels..."
tokens = labelselect.__call__(tokens, args.labels) # Select only the labels
print "Normalizing dataset..."
#tokens = normalize.__call__(tokens) # Normalize the tokens
if args.normalize and args.normalize_words:
normalize.normalize_words(tokens)
if args.normalize and args.normalize_punct:
normalize.normalize_punct(tokens)
if args.normalize and args.normalize_emoticons:
normalize.normalize_emoticons(tokens)
if args.normalize and args.normalize_users:
normalize.normalize_users(tokens)
if args.normalize and args.normalize_hashtags:
normalize.normalize_hashtags(tokens)
if args.normalize and args.normalize_nums:
normalize.normalize_nums(tokens)
if args.normalize and args.normalize_urls:
normalize.normalize_urls(tokens)
print "Transforming dataset..."
feature_list = dictizer.__call__(tokens)
docfreq = wordselection.calculate_docfreq(feature_list)
if args.stopword_removal:
print "Removing stopwords from the dataset..."
feature_list = wordselection.remove_stopwords(feature_list)
if args.uncommon_selection:
print "Removing uncommon words from the dataset..."
feature_list = wordselection.remove_uncommon(feature_list, docfreq, args.df_cutoff)
wordselection.print_reatined_features(docfreq, args.df_cutoff)
# Write the features out to a file
with open("filtered_docs.txt", "w") as w:
for row in feature_list:
w.write(str(row[0]) + "\n")
print "Generating feature set statistics..."
statsify.__call__(feature_list, args.labels)
print "Splitting the dataset..."
if args.validation_metric == "none":
train_set, _, test_set = split_dataset.__call__(feature_list, 0.2)
else:
train_set, validation_set, test_set = split_dataset.__call__(feature_list, 0.2, validation_size=0.2)
if args.classifier_type == "max_ent":
if args.minlldelta:
classifier = maxent_classifier(train_set, lldelta=args.minlldelta)
elif args.minll:
classifier = maxent_classifier(train_set, ll=args.minll)
elif args.validation_metric != "none":
classifier = maxent_classifier_with_validation(train_set, validation_set,
args.validation_metric, 3)
elif args.numIterations:
classifier = maxent_classifier(train_set, iterations=args.numIterations)
else:
print "Error no cut off set"
sys.exit(0)
else:
classifier = naive_bayes_classifier(train_set)
print "\nTesting"
classifier.test(test_set, args.labels)
classifier.show_informative_features(30)
#classifier.inspect_errors(test_set)
| mit | -6,072,132,011,073,518,000 | 33.785714 | 108 | 0.683778 | false |
dannyboi104/SickRage | lib/dogpile/cache/util.py | 47 | 5679 | from hashlib import sha1
import inspect
import re
import collections
from . import compat
def coerce_string_conf(d):
result = {}
for k, v in d.items():
if not isinstance(v, compat.string_types):
result[k] = v
continue
v = v.strip()
if re.match(r'^[-+]?\d+$', v):
result[k] = int(v)
elif re.match(r'^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][-+]?\d+)?$', v):
result[k] = float(v)
elif v.lower() in ('false', 'true'):
result[k] = v.lower() == 'true'
elif v == 'None':
result[k] = None
else:
result[k] = v
return result
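# --- Illustrative sketch (not part of the original module) ---
# coerce_string_conf() turns the string values typically read from an .ini
# style configuration into typed Python values.  The sample dictionary below
# is an assumption used only for illustration.
def _example_coerce_string_conf():
    raw = {
        'expiration_time': '3600',
        'arguments.distributed_lock': 'true',
        'arguments.lock_timeout': '30.5',
        'backend': 'dogpile.cache.redis',
    }
    return coerce_string_conf(raw)
    # -> {'expiration_time': 3600, 'arguments.distributed_lock': True,
    #     'arguments.lock_timeout': 30.5, 'backend': 'dogpile.cache.redis'}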
class PluginLoader(object):
def __init__(self, group):
self.group = group
self.impls = {}
def load(self, name):
if name in self.impls:
return self.impls[name]()
else: # pragma NO COVERAGE
import pkg_resources
for impl in pkg_resources.iter_entry_points(
self.group, name):
self.impls[name] = impl.load
return impl.load()
else:
raise Exception(
"Can't load plugin %s %s" %
(self.group, name))
def register(self, name, modulepath, objname):
def load():
mod = __import__(modulepath, fromlist=[objname])
return getattr(mod, objname)
self.impls[name] = load
def function_key_generator(namespace, fn, to_str=compat.string_type):
"""Return a function that generates a string
key, based on a given function as well as
arguments to the returned function itself.
This is used by :meth:`.CacheRegion.cache_on_arguments`
to generate a cache key from a decorated function.
It can be replaced using the ``function_key_generator``
argument passed to :func:`.make_region`.
"""
if namespace is None:
namespace = '%s:%s' % (fn.__module__, fn.__name__)
else:
namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
args = inspect.getargspec(fn)
has_self = args[0] and args[0][0] in ('self', 'cls')
def generate_key(*args, **kw):
if kw:
raise ValueError(
"dogpile.cache's default key creation "
"function does not accept keyword arguments.")
if has_self:
args = args[1:]
return namespace + "|" + " ".join(map(to_str, args))
return generate_key
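# --- Illustrative sketch (not part of the original module) ---
# The generator returned by function_key_generator() builds keys from the
# decorated function's module, name and positional arguments.  The function
# `lookup` and its arguments below are assumptions used only for illustration.
def _example_function_key_generation():
    def lookup(region, user_id):
        return user_id

    generate_key = function_key_generator(None, lookup)
    # yields "<module>:lookup|region1 42", e.g.
    # "dogpile.cache.util:lookup|region1 42" when defined in this module
    return generate_key('region1', 42)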
def function_multi_key_generator(namespace, fn, to_str=compat.string_type):
if namespace is None:
namespace = '%s:%s' % (fn.__module__, fn.__name__)
else:
namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
args = inspect.getargspec(fn)
has_self = args[0] and args[0][0] in ('self', 'cls')
def generate_keys(*args, **kw):
if kw:
raise ValueError(
"dogpile.cache's default key creation "
"function does not accept keyword arguments.")
if has_self:
args = args[1:]
return [namespace + "|" + key for key in map(to_str, args)]
return generate_keys
def sha1_mangle_key(key):
"""a SHA1 key mangler."""
return sha1(key).hexdigest()
def length_conditional_mangler(length, mangler):
"""a key mangler that mangles if the length of the key is
past a certain threshold.
"""
def mangle(key):
if len(key) >= length:
return mangler(key)
else:
return key
return mangle
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
def __get__(self, obj, cls):
if obj is None:
return self
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
def to_list(x, default=None):
"""Coerce to a list."""
if x is None:
return default
if not isinstance(x, (list, tuple)):
return [x]
else:
return x
class KeyReentrantMutex(object):
def __init__(self, key, mutex, keys):
self.key = key
self.mutex = mutex
self.keys = keys
@classmethod
def factory(cls, mutex):
# this collection holds zero or one
# thread idents as the key; a set of
# keynames held as the value.
keystore = collections.defaultdict(set)
def fac(key):
return KeyReentrantMutex(key, mutex, keystore)
return fac
def acquire(self, wait=True):
current_thread = compat.threading.current_thread().ident
keys = self.keys.get(current_thread)
if keys is not None and \
self.key not in keys:
# current lockholder, new key. add it in
keys.add(self.key)
return True
elif self.mutex.acquire(wait=wait):
# after acquire, create new set and add our key
self.keys[current_thread].add(self.key)
return True
else:
return False
def release(self):
current_thread = compat.threading.current_thread().ident
keys = self.keys.get(current_thread)
assert keys is not None, "this thread didn't do the acquire"
assert self.key in keys, "No acquire held for key '%s'" % self.key
keys.remove(self.key)
if not keys:
# when list of keys empty, remove
# the thread ident and unlock.
del self.keys[current_thread]
self.mutex.release()
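# --- Illustrative sketch (not part of the original module) ---
# KeyReentrantMutex.factory() wraps a single underlying mutex so that one
# thread may "acquire" several distinct keys while holding it; the real lock
# is only released when that thread's last key is released.  The adapter and
# key names below are assumptions used only for illustration.
def _example_key_reentrant_mutex():
    import threading

    class _LockAdapter(object):
        """Adapts threading.Lock to the acquire(wait=...)/release() protocol used above."""
        def __init__(self):
            self._lock = threading.Lock()

        def acquire(self, wait=True):
            return self._lock.acquire(wait)

        def release(self):
            self._lock.release()

    factory = KeyReentrantMutex.factory(_LockAdapter())
    mutex_a = factory("key-a")
    mutex_b = factory("key-b")
    mutex_a.acquire()   # takes the real lock
    mutex_b.acquire()   # same thread, new key: no second lock is taken
    mutex_b.release()
    mutex_a.release()   # last key released: the real lock is released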
| gpl-3.0 | 113,900,008,304,315,400 | 28.123077 | 77 | 0.550625 | false |
pawelmhm/scrapy | tests/test_webclient.py | 3 | 16648 | """
Tests borrowed from the twisted.web.client tests.
"""
import os
import shutil
import sys
from pkg_resources import parse_version
import cryptography
import OpenSSL.SSL
from twisted.trial import unittest
from twisted.web import server, static, util, resource
from twisted.internet import reactor, defer
try:
from twisted.internet.testing import StringTransport
except ImportError:
# deprecated in Twisted 19.7.0
# (remove once we bump our requirement past that version)
from twisted.test.proto_helpers import StringTransport
from twisted.python.filepath import FilePath
from twisted.protocols.policies import WrappingFactory
from twisted.internet.defer import inlineCallbacks
from twisted.web.test.test_webclient import (
ForeverTakingResource,
ErrorResource,
NoLengthResource,
HostHeaderResource,
PayloadResource,
BrokenDownloadResource,
)
from scrapy.core.downloader import webclient as client
from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory
from scrapy.http import Request, Headers
from scrapy.settings import Settings
from scrapy.utils.misc import create_instance
from scrapy.utils.python import to_bytes, to_unicode
from tests.mockserver import ssl_context_factory
def getPage(url, contextFactory=None, response_transform=None, *args, **kwargs):
"""Adapted version of twisted.web.client.getPage"""
def _clientfactory(url, *args, **kwargs):
url = to_unicode(url)
timeout = kwargs.pop('timeout', 0)
f = client.ScrapyHTTPClientFactory(
Request(url, *args, **kwargs), timeout=timeout)
f.deferred.addCallback(response_transform or (lambda r: r.body))
return f
from twisted.web.client import _makeGetterFactory
return _makeGetterFactory(
to_bytes(url), _clientfactory, contextFactory=contextFactory, *args, **kwargs
).deferred
class ParseUrlTestCase(unittest.TestCase):
"""Test URL parsing facility and defaults values."""
def _parse(self, url):
f = client.ScrapyHTTPClientFactory(Request(url))
return (f.scheme, f.netloc, f.host, f.port, f.path)
def testParse(self):
lip = '127.0.0.1'
tests = (
("http://127.0.0.1?c=v&c2=v2#fragment", ('http', lip, lip, 80, '/?c=v&c2=v2')),
("http://127.0.0.1/?c=v&c2=v2#fragment", ('http', lip, lip, 80, '/?c=v&c2=v2')),
("http://127.0.0.1/foo?c=v&c2=v2#frag", ('http', lip, lip, 80, '/foo?c=v&c2=v2')),
("http://127.0.0.1:100?c=v&c2=v2#fragment", ('http', lip + ':100', lip, 100, '/?c=v&c2=v2')),
("http://127.0.0.1:100/?c=v&c2=v2#frag", ('http', lip + ':100', lip, 100, '/?c=v&c2=v2')),
("http://127.0.0.1:100/foo?c=v&c2=v2#frag", ('http', lip + ':100', lip, 100, '/foo?c=v&c2=v2')),
("http://127.0.0.1", ('http', lip, lip, 80, '/')),
("http://127.0.0.1/", ('http', lip, lip, 80, '/')),
("http://127.0.0.1/foo", ('http', lip, lip, 80, '/foo')),
("http://127.0.0.1?param=value", ('http', lip, lip, 80, '/?param=value')),
("http://127.0.0.1/?param=value", ('http', lip, lip, 80, '/?param=value')),
("http://127.0.0.1:12345/foo", ('http', lip + ':12345', lip, 12345, '/foo')),
("http://spam:12345/foo", ('http', 'spam:12345', 'spam', 12345, '/foo')),
("http://spam.test.org/foo", ('http', 'spam.test.org', 'spam.test.org', 80, '/foo')),
("https://127.0.0.1/foo", ('https', lip, lip, 443, '/foo')),
("https://127.0.0.1/?param=value", ('https', lip, lip, 443, '/?param=value')),
("https://127.0.0.1:12345/", ('https', lip + ':12345', lip, 12345, '/')),
("http://scrapytest.org/foo ", ('http', 'scrapytest.org', 'scrapytest.org', 80, '/foo')),
("http://egg:7890 ", ('http', 'egg:7890', 'egg', 7890, '/')),
)
for url, test in tests:
test = tuple(
to_bytes(x) if not isinstance(x, int) else x for x in test)
self.assertEqual(client._parse(url), test, url)
class ScrapyHTTPPageGetterTests(unittest.TestCase):
def test_earlyHeaders(self):
# basic test stolen from twisted HTTPageGetter
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
body="some data",
headers={
'Host': 'example.net',
'User-Agent': 'fooble',
'Cookie': 'blah blah',
'Content-Length': '12981',
'Useful': 'value'}))
self._test(
factory,
b"GET /bar HTTP/1.0\r\n"
b"Content-Length: 9\r\n"
b"Useful: value\r\n"
b"Connection: close\r\n"
b"User-Agent: fooble\r\n"
b"Host: example.net\r\n"
b"Cookie: blah blah\r\n"
b"\r\n"
b"some data")
# test minimal sent headers
factory = client.ScrapyHTTPClientFactory(Request('http://foo/bar'))
self._test(
factory,
b"GET /bar HTTP/1.0\r\n"
b"Host: foo\r\n"
b"\r\n")
# test a simple POST with body and content-type
factory = client.ScrapyHTTPClientFactory(Request(
method='POST',
url='http://foo/bar',
body='name=value',
headers={'Content-Type': 'application/x-www-form-urlencoded'}))
self._test(
factory,
b"POST /bar HTTP/1.0\r\n"
b"Host: foo\r\n"
b"Connection: close\r\n"
b"Content-Type: application/x-www-form-urlencoded\r\n"
b"Content-Length: 10\r\n"
b"\r\n"
b"name=value")
# test a POST method with no body provided
factory = client.ScrapyHTTPClientFactory(Request(
method='POST',
url='http://foo/bar'
))
self._test(
factory,
b"POST /bar HTTP/1.0\r\n"
b"Host: foo\r\n"
b"Content-Length: 0\r\n"
b"\r\n")
# test with single and multivalued headers
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
headers={
'X-Meta-Single': 'single',
'X-Meta-Multivalued': ['value1', 'value2'],
},
))
self._test(
factory,
b"GET /bar HTTP/1.0\r\n"
b"Host: foo\r\n"
b"X-Meta-Multivalued: value1\r\n"
b"X-Meta-Multivalued: value2\r\n"
b"X-Meta-Single: single\r\n"
b"\r\n")
# same test with single and multivalued headers but using Headers class
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar',
headers=Headers({
'X-Meta-Single': 'single',
'X-Meta-Multivalued': ['value1', 'value2'],
}),
))
self._test(
factory,
b"GET /bar HTTP/1.0\r\n"
b"Host: foo\r\n"
b"X-Meta-Multivalued: value1\r\n"
b"X-Meta-Multivalued: value2\r\n"
b"X-Meta-Single: single\r\n"
b"\r\n")
def _test(self, factory, testvalue):
transport = StringTransport()
protocol = client.ScrapyHTTPPageGetter()
protocol.factory = factory
protocol.makeConnection(transport)
self.assertEqual(
set(transport.value().splitlines()),
set(testvalue.splitlines()))
return testvalue
def test_non_standard_line_endings(self):
# regression test for: http://dev.scrapy.org/ticket/258
factory = client.ScrapyHTTPClientFactory(Request(
url='http://foo/bar'))
protocol = client.ScrapyHTTPPageGetter()
protocol.factory = factory
protocol.headers = Headers()
protocol.dataReceived(b"HTTP/1.0 200 OK\n")
protocol.dataReceived(b"Hello: World\n")
protocol.dataReceived(b"Foo: Bar\n")
protocol.dataReceived(b"\n")
self.assertEqual(protocol.headers, Headers({'Hello': ['World'], 'Foo': ['Bar']}))
class EncodingResource(resource.Resource):
out_encoding = 'cp1251'
def render(self, request):
body = to_unicode(request.content.read())
request.setHeader(b'content-encoding', self.out_encoding)
return body.encode(self.out_encoding)
class WebClientTestCase(unittest.TestCase):
def _listen(self, site):
return reactor.listenTCP(0, site, interface="127.0.0.1")
def setUp(self):
self.tmpname = self.mktemp()
os.mkdir(self.tmpname)
FilePath(self.tmpname).child("file").setContent(b"0123456789")
r = static.File(self.tmpname)
r.putChild(b"redirect", util.Redirect(b"/file"))
r.putChild(b"wait", ForeverTakingResource())
r.putChild(b"error", ErrorResource())
r.putChild(b"nolength", NoLengthResource())
r.putChild(b"host", HostHeaderResource())
r.putChild(b"payload", PayloadResource())
r.putChild(b"broken", BrokenDownloadResource())
r.putChild(b"encoding", EncodingResource())
self.site = server.Site(r, timeout=None)
self.wrapper = WrappingFactory(self.site)
self.port = self._listen(self.wrapper)
self.portno = self.port.getHost().port
@inlineCallbacks
def tearDown(self):
yield self.port.stopListening()
shutil.rmtree(self.tmpname)
def getURL(self, path):
return f"http://127.0.0.1:{self.portno}/{path}"
def testPayload(self):
s = "0123456789" * 10
return getPage(self.getURL("payload"), body=s).addCallback(
self.assertEqual, to_bytes(s))
def testHostHeader(self):
# if we pass Host header explicitly, it should be used, otherwise
# it should extract from url
return defer.gatherResults([
getPage(self.getURL("host")).addCallback(
self.assertEqual, to_bytes(f"127.0.0.1:{self.portno}")),
getPage(self.getURL("host"), headers={"Host": "www.example.com"}).addCallback(
self.assertEqual, to_bytes("www.example.com"))])
def test_getPage(self):
"""
L{client.getPage} returns a L{Deferred} which is called back with
the body of the response if the default method B{GET} is used.
"""
d = getPage(self.getURL("file"))
d.addCallback(self.assertEqual, b"0123456789")
return d
def test_getPageHead(self):
"""
L{client.getPage} returns a L{Deferred} which is called back with
the empty string if the method is C{HEAD} and there is a successful
response code.
"""
def _getPage(method):
return getPage(self.getURL("file"), method=method)
return defer.gatherResults([
_getPage("head").addCallback(self.assertEqual, b""),
_getPage("HEAD").addCallback(self.assertEqual, b"")])
def test_timeoutNotTriggering(self):
"""
When a non-zero timeout is passed to L{getPage} and the page is
retrieved before the timeout period elapses, the L{Deferred} is
called back with the contents of the page.
"""
d = getPage(self.getURL("host"), timeout=100)
d.addCallback(
self.assertEqual, to_bytes(f"127.0.0.1:{self.portno}"))
return d
def test_timeoutTriggering(self):
"""
When a non-zero timeout is passed to L{getPage} and that many
seconds elapse before the server responds to the request. the
L{Deferred} is errbacked with a L{error.TimeoutError}.
"""
finished = self.assertFailure(
getPage(self.getURL("wait"), timeout=0.000001),
defer.TimeoutError)
def cleanup(passthrough):
# Clean up the server which is hanging around not doing
# anything.
connected = list(self.wrapper.protocols.keys())
# There might be nothing here if the server managed to already see
# that the connection was lost.
if connected:
connected[0].transport.loseConnection()
return passthrough
finished.addBoth(cleanup)
return finished
def testNotFound(self):
return getPage(self.getURL('notsuchfile')).addCallback(self._cbNoSuchFile)
def _cbNoSuchFile(self, pageData):
self.assertIn(b'404 - No Such Resource', pageData)
def testFactoryInfo(self):
url = self.getURL('file')
_, _, host, port, _ = client._parse(url)
factory = client.ScrapyHTTPClientFactory(Request(url))
reactor.connectTCP(to_unicode(host), port, factory)
return factory.deferred.addCallback(self._cbFactoryInfo, factory)
def _cbFactoryInfo(self, ignoredResult, factory):
self.assertEqual(factory.status, b'200')
self.assertTrue(factory.version.startswith(b'HTTP/'))
self.assertEqual(factory.message, b'OK')
self.assertEqual(factory.response_headers[b'content-length'], b'10')
def testRedirect(self):
return getPage(self.getURL("redirect")).addCallback(self._cbRedirect)
def _cbRedirect(self, pageData):
self.assertEqual(
pageData,
b'\n<html>\n <head>\n <meta http-equiv="refresh" content="0;URL=/file">\n'
b' </head>\n <body bgcolor="#FFFFFF" text="#000000">\n '
b'<a href="/file">click here</a>\n </body>\n</html>\n')
def test_encoding(self):
""" Test that non-standart body encoding matches
Content-Encoding header """
body = b'\xd0\x81\xd1\x8e\xd0\xaf'
dfd = getPage(self.getURL('encoding'), body=body, response_transform=lambda r: r)
return dfd.addCallback(self._check_Encoding, body)
def _check_Encoding(self, response, original_body):
content_encoding = to_unicode(response.headers[b'Content-Encoding'])
self.assertEqual(content_encoding, EncodingResource.out_encoding)
self.assertEqual(
response.body.decode(content_encoding), to_unicode(original_body))
class WebClientSSLTestCase(unittest.TestCase):
context_factory = None
def _listen(self, site):
return reactor.listenSSL(
0, site,
contextFactory=self.context_factory or ssl_context_factory(),
interface="127.0.0.1")
def getURL(self, path):
return f"https://127.0.0.1:{self.portno}/{path}"
def setUp(self):
self.tmpname = self.mktemp()
os.mkdir(self.tmpname)
FilePath(self.tmpname).child("file").setContent(b"0123456789")
r = static.File(self.tmpname)
r.putChild(b"payload", PayloadResource())
self.site = server.Site(r, timeout=None)
self.wrapper = WrappingFactory(self.site)
self.port = self._listen(self.wrapper)
self.portno = self.port.getHost().port
@inlineCallbacks
def tearDown(self):
yield self.port.stopListening()
shutil.rmtree(self.tmpname)
def testPayload(self):
s = "0123456789" * 10
return getPage(self.getURL("payload"), body=s).addCallback(
self.assertEqual, to_bytes(s))
class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):
# we try to use a cipher that is not enabled by default in OpenSSL
custom_ciphers = 'CAMELLIA256-SHA'
context_factory = ssl_context_factory(cipher_string=custom_ciphers)
def testPayload(self):
s = "0123456789" * 10
settings = Settings({'DOWNLOADER_CLIENT_TLS_CIPHERS': self.custom_ciphers})
client_context_factory = create_instance(ScrapyClientContextFactory, settings=settings, crawler=None)
return getPage(
self.getURL("payload"), body=s, contextFactory=client_context_factory
).addCallback(self.assertEqual, to_bytes(s))
def testPayloadDisabledCipher(self):
if sys.implementation.name == "pypy" and parse_version(cryptography.__version__) <= parse_version("2.3.1"):
self.skipTest("This test expects a failure, but the code does work in PyPy with cryptography<=2.3.1")
s = "0123456789" * 10
settings = Settings({'DOWNLOADER_CLIENT_TLS_CIPHERS': 'ECDHE-RSA-AES256-GCM-SHA384'})
client_context_factory = create_instance(ScrapyClientContextFactory, settings=settings, crawler=None)
d = getPage(self.getURL("payload"), body=s, contextFactory=client_context_factory)
return self.assertFailure(d, OpenSSL.SSL.Error)
| bsd-3-clause | -9,011,519,938,139,851,000 | 38.079812 | 115 | 0.601273 | false |
srippa/nn_deep | assignment1/cs231n/classifiers/softmax.py | 3 | 2307 | import numpy as np
from random import shuffle
def softmax_loss_naive(W, X, y, reg):
"""
Softmax loss function, naive implementation (with loops)
Inputs:
- W: C x D array of weights
- X: D x N array of data. Data are D-dimensional columns
- y: 1-dimensional array of length N with labels 0...K-1, for K classes
- reg: (float) regularization strength
Returns:
a tuple of:
- loss as single float
- gradient with respect to weights W, an array of same size as W
"""
# Initialize the loss and gradient to zero.
loss = 0.0
dW = np.zeros_like(W)
#############################################################################
# TODO: Compute the softmax loss and its gradient using explicit loops. #
# Store the loss in loss and the gradient in dW. If you are not careful #
# here, it is easy to run into numeric instability. Don't forget the #
# regularization! #
#############################################################################
pass
#############################################################################
# END OF YOUR CODE #
#############################################################################
return loss, dW
def softmax_loss_vectorized(W, X, y, reg):
"""
Softmax loss function, vectorized version.
Inputs and outputs are the same as softmax_loss_naive.
"""
# Initialize the loss and gradient to zero.
loss = 0.0
dW = np.zeros_like(W)
#############################################################################
# TODO: Compute the softmax loss and its gradient using no explicit loops. #
# Store the loss in loss and the gradient in dW. If you are not careful #
# here, it is easy to run into numeric instability. Don't forget the #
# regularization! #
#############################################################################
pass
#############################################################################
# END OF YOUR CODE #
#############################################################################
return loss, dW
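# --- Illustrative usage sketch (not part of the original assignment file) ---
# Both loss functions expect W of shape (C, D), X of shape (D, N) holding data
# as columns, labels y of length N, and return a scalar loss plus a gradient
# of the same shape as W.  The toy sizes below are assumptions chosen only to
# illustrate the calling convention.
def _example_softmax_usage():
    num_classes, dim, num_train = 10, 3073, 5
    W = np.random.randn(num_classes, dim) * 0.0001
    X = np.random.randn(dim, num_train)
    y = np.random.randint(num_classes, size=num_train)

    loss_naive, grad_naive = softmax_loss_naive(W, X, y, 0.00001)
    loss_vec, grad_vec = softmax_loss_vectorized(W, X, y, 0.00001)
    # Once the TODO blocks are implemented, both versions should agree:
    #   abs(loss_naive - loss_vec) ~ 0, np.linalg.norm(grad_naive - grad_vec) ~ 0
    return loss_naive, loss_vec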
| mit | 5,503,033,794,580,793,000 | 40.196429 | 79 | 0.420026 | false |
opennode/waldur-mastermind | src/waldur_slurm/handlers.py | 1 | 2767 | import functools
from django.conf import settings
from django.db import transaction
from django.db.models import Sum
from waldur_core.core import utils as core_utils
from waldur_freeipa import models as freeipa_models
from . import models, tasks, utils
def if_plugin_enabled(f):
"""Calls decorated handler only if plugin is enabled."""
@functools.wraps(f)
def wrapped(*args, **kwargs):
if settings.WALDUR_SLURM['ENABLED']:
return f(*args, **kwargs)
return wrapped
@if_plugin_enabled
def process_user_creation(sender, instance, created=False, **kwargs):
if not created:
return
transaction.on_commit(
lambda: tasks.add_user.delay(core_utils.serialize_instance(instance))
)
@if_plugin_enabled
def process_user_deletion(sender, instance, **kwargs):
transaction.on_commit(
lambda: tasks.delete_user.delay(core_utils.serialize_instance(instance))
)
@if_plugin_enabled
def process_role_granted(sender, structure, user, role, **kwargs):
try:
freeipa_profile = freeipa_models.Profile.objects.get(user=user)
serialized_profile = core_utils.serialize_instance(freeipa_profile)
serialized_structure = core_utils.serialize_instance(structure)
transaction.on_commit(
lambda: tasks.process_role_granted.delay(
serialized_profile, serialized_structure
)
)
except freeipa_models.Profile.DoesNotExist:
pass
@if_plugin_enabled
def process_role_revoked(sender, structure, user, role, **kwargs):
try:
freeipa_profile = freeipa_models.Profile.objects.get(user=user)
serialized_profile = core_utils.serialize_instance(freeipa_profile)
serialized_structure = core_utils.serialize_instance(structure)
transaction.on_commit(
lambda: tasks.process_role_revoked.delay(
serialized_profile, serialized_structure
)
)
except freeipa_models.Profile.DoesNotExist:
pass
@if_plugin_enabled
def update_quotas_on_allocation_usage_update(sender, instance, created=False, **kwargs):
if created:
return
allocation = instance
if not allocation.usage_changed():
return
project = allocation.project
update_quotas(project, models.Allocation.Permissions.project_path)
update_quotas(project.customer, models.Allocation.Permissions.customer_path)
def update_quotas(scope, path):
qs = models.Allocation.objects.filter(**{path: scope}).values(path)
for quota in utils.FIELD_NAMES:
qs = qs.annotate(**{'total_%s' % quota: Sum(quota)})
qs = list(qs)[0]
for quota in utils.FIELD_NAMES:
scope.set_quota_usage(utils.MAPPING[quota], qs['total_%s' % quota])
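# --- Illustrative sketch (not part of the original module) ---
# Handlers with the signatures above are normally connected to Django model
# signals in the application's AppConfig.ready().  The sender model and the
# dispatch_uid strings below are assumptions used only to show the wiring.
def _example_signal_wiring():
    from django.db.models import signals

    signals.post_save.connect(
        process_user_creation,
        sender=freeipa_models.Profile,
        dispatch_uid='waldur_slurm.handlers.process_user_creation',
    )
    signals.post_delete.connect(
        process_user_deletion,
        sender=freeipa_models.Profile,
        dispatch_uid='waldur_slurm.handlers.process_user_deletion',
    )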
| mit | -5,899,177,402,627,099,000 | 29.406593 | 88 | 0.685219 | false |
chris-chambers/llvm | test/CodeGen/SystemZ/Large/spill-01.py | 23 | 1245 | # Test cases where MVC is used for spill slots that end up being out of range.
# RUN: python %s | llc -mtriple=s390x-linux-gnu | FileCheck %s
# There are 8 usable call-saved GPRs, two of which are needed for the base
# registers. The first 160 bytes of the frame are needed for the ABI
# call frame, and a further 8 bytes are needed for the emergency spill slot.
# That means we will have at least one out-of-range slot if:
#
# count == (4096 - 168) / 8 + 6 + 1 == 498
#
# Add in some extra room and check both %r15+4096 (the first out-of-range slot)
# and %r15+4104.
#
# CHECK: f1:
# CHECK: lay [[REG:%r[0-5]]], 4096(%r15)
# CHECK: mvc 0(8,[[REG]]), {{[0-9]+}}({{%r[0-9]+}})
# CHECK: brasl %r14, foo@PLT
# CHECK: lay [[REG:%r[0-5]]], 4096(%r15)
# CHECK: mvc {{[0-9]+}}(8,{{%r[0-9]+}}), 8([[REG]])
# CHECK: br %r14
count = 500
print 'declare void @foo()'
print ''
print 'define void @f1(i64 *%base0, i64 *%base1) {'
for i in range(count):
print ' %%ptr%d = getelementptr i64 *%%base%d, i64 %d' % (i, i % 2, i / 2)
print ' %%val%d = load i64 *%%ptr%d' % (i, i)
print ''
print ' call void @foo()'
print ''
for i in range(count):
print ' store i64 %%val%d, i64 *%%ptr%d' % (i, i)
print ''
print ' ret void'
print '}'
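# --- Illustrative note (not part of the original test generator) ---
# For count = 2 the script above would emit IR along these lines:
#
#   declare void @foo()
#
#   define void @f1(i64 *%base0, i64 *%base1) {
#     %ptr0 = getelementptr i64 *%base0, i64 0
#     %val0 = load i64 *%ptr0
#     %ptr1 = getelementptr i64 *%base1, i64 0
#     %val1 = load i64 *%ptr1
#
#     call void @foo()
#
#     store i64 %val0, i64 *%ptr0
#     store i64 %val1, i64 *%ptr1
#
#     ret void
#   }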
| gpl-3.0 | -7,102,477,263,138,859,000 | 30.125 | 79 | 0.60241 | false |
LuqmanSahaf/kubernetes | third_party/htpasswd/htpasswd.py | 897 | 5219 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2013 Edgewall Software
# Copyright (C) 2008 Eli Carter
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
"""Replacement for htpasswd"""
import os
import sys
import random
import time
from optparse import OptionParser
# We need a crypt module, but Windows doesn't have one by default. Try to find
# one, and tell the user if we can't.
try:
import crypt
except ImportError:
try:
import fcrypt as crypt
except ImportError:
sys.stderr.write("Cannot find a crypt module. "
"Possibly http://carey.geek.nz/code/python-fcrypt/\n")
sys.exit(1)
def wait_for_file_mtime_change(filename):
"""This function is typically called before a file save operation,
waiting if necessary for the file modification time to change. The
purpose is to avoid successive file updates going undetected by the
caching mechanism that depends on a change in the file modification
time to know when the file should be reparsed."""
try:
mtime = os.stat(filename).st_mtime
os.utime(filename, None)
while mtime == os.stat(filename).st_mtime:
time.sleep(1e-3)
os.utime(filename, None)
except OSError:
pass # file doesn't exist (yet)
def salt():
"""Returns a string of 2 randome letters"""
letters = 'abcdefghijklmnopqrstuvwxyz' \
'ABCDEFGHIJKLMNOPQRSTUVWXYZ' \
'0123456789/.'
return random.choice(letters) + random.choice(letters)
class HtpasswdFile:
"""A class for manipulating htpasswd files."""
def __init__(self, filename, create=False):
self.entries = []
self.filename = filename
if not create:
if os.path.exists(self.filename):
self.load()
else:
raise Exception("%s does not exist" % self.filename)
def load(self):
"""Read the htpasswd file into memory."""
lines = open(self.filename, 'r').readlines()
self.entries = []
for line in lines:
username, pwhash = line.split(':')
entry = [username, pwhash.rstrip()]
self.entries.append(entry)
def save(self):
"""Write the htpasswd file to disk"""
wait_for_file_mtime_change(self.filename)
open(self.filename, 'w').writelines(["%s:%s\n" % (entry[0], entry[1])
for entry in self.entries])
def update(self, username, password):
"""Replace the entry for the given user, or add it if new."""
pwhash = crypt.crypt(password, salt())
matching_entries = [entry for entry in self.entries
if entry[0] == username]
if matching_entries:
matching_entries[0][1] = pwhash
else:
self.entries.append([username, pwhash])
def delete(self, username):
"""Remove the entry for the given user."""
self.entries = [entry for entry in self.entries
if entry[0] != username]
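# --- Illustrative sketch (not part of the original script) ---
# HtpasswdFile can also be driven programmatically; the file name and
# credentials below are assumptions used only for illustration.
def _example_htpasswd_usage():
    passwdfile = HtpasswdFile('/tmp/example.htpasswd', create=True)
    passwdfile.update('alice', 's3cret')   # add or replace the entry for 'alice'
    passwdfile.save()                      # writes a line like 'alice:<crypt hash>'
    passwdfile.delete('alice')
    passwdfile.save()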
def main():
"""
%prog -b[c] filename username password
%prog -D filename username"""
# For now, we only care about the use cases that affect tests/functional.py
parser = OptionParser(usage=main.__doc__)
parser.add_option('-b', action='store_true', dest='batch', default=False,
help='Batch mode; password is passed on the command line IN THE CLEAR.'
)
parser.add_option('-c', action='store_true', dest='create', default=False,
help='Create a new htpasswd file, overwriting any existing file.')
parser.add_option('-D', action='store_true', dest='delete_user',
default=False, help='Remove the given user from the password file.')
options, args = parser.parse_args()
def syntax_error(msg):
"""Utility function for displaying fatal error messages with usage
help.
"""
sys.stderr.write("Syntax error: " + msg)
sys.stderr.write(parser.get_usage())
sys.exit(1)
if not (options.batch or options.delete_user):
syntax_error("Only batch and delete modes are supported\n")
# Non-option arguments
if len(args) < 2:
syntax_error("Insufficient number of arguments.\n")
filename, username = args[:2]
if options.delete_user:
if len(args) != 2:
syntax_error("Incorrect number of arguments.\n")
password = None
else:
if len(args) != 3:
syntax_error("Incorrect number of arguments.\n")
password = args[2]
passwdfile = HtpasswdFile(filename, create=options.create)
if options.delete_user:
passwdfile.delete(username)
else:
passwdfile.update(username, password)
passwdfile.save()
if __name__ == '__main__':
main()
| apache-2.0 | 4,181,964,981,834,340,000 | 33.111111 | 79 | 0.621 | false |
AltSchool/django | django/contrib/gis/utils/srs.py | 45 | 3041 | from django.contrib.gis.gdal import SpatialReference
from django.db import DEFAULT_DB_ALIAS, connections
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
database=None):
"""
This function takes a GDAL SpatialReference system and adds its information
to the `spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(3857)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
"""
if not database:
database = DEFAULT_DB_ALIAS
connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works '
'with spatial backends.')
if not connection.features.supports_add_srs_entry:
raise Exception('This utility does not support your database backend.')
SpatialRefSys = connection.ops.spatial_ref_sys()
# If argument is not a `SpatialReference` instance, use it as parameter
# to construct a `SpatialReference` instance.
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
if srs.srid is None:
raise Exception('Spatial reference requires an SRID to be '
'compatible with the spatial backend.')
# Initializing the keyword arguments dictionary for both PostGIS
# and SpatiaLite.
kwargs = {'srid': srs.srid,
'auth_name': auth_name,
'auth_srid': auth_srid or srs.srid,
'proj4text': srs.proj4,
}
# Backend-specific fields for the SpatialRefSys model.
srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()}
if 'srtext' in srs_field_names:
kwargs['srtext'] = srs.wkt
if 'ref_sys_name' in srs_field_names:
# Spatialite specific
kwargs['ref_sys_name'] = ref_sys_name or srs.name
# Creating the spatial_ref_sys model.
try:
# Try getting via SRID only, because using all kwargs may
# differ from exact wkt/proj in database.
SpatialRefSys.objects.using(database).get(srid=srs.srid)
except SpatialRefSys.DoesNotExist:
SpatialRefSys.objects.using(database).create(**kwargs)
| bsd-3-clause | 6,989,093,889,395,206,000 | 38.493506 | 79 | 0.665899 | false |
kaushik94/sympy | sympy/diffgeom/tests/test_hyperbolic_space.py | 22 | 2583 | r'''
unit test describing the hyperbolic half-plane with the Poincare metric. This
is a basic model of hyperbolic geometry on the (positive) half-space
{(x,y) \in R^2 | y > 0}
with the Riemannian metric
ds^2 = (dx^2 + dy^2)/y^2
It has constant negative scalar curvature = -2
https://en.wikipedia.org/wiki/Poincare_half-plane_model
'''
from sympy import diag
from sympy.diffgeom import (twoform_to_matrix,
metric_to_Christoffel_1st, metric_to_Christoffel_2nd,
metric_to_Riemann_components, metric_to_Ricci_components)
import sympy.diffgeom.rn
from sympy.tensor.array import ImmutableDenseNDimArray
def test_H2():
TP = sympy.diffgeom.TensorProduct
R2 = sympy.diffgeom.rn.R2
y = R2.y
dy = R2.dy
dx = R2.dx
g = (TP(dx, dx) + TP(dy, dy))*y**(-2)
automat = twoform_to_matrix(g)
mat = diag(y**(-2), y**(-2))
assert mat == automat
gamma1 = metric_to_Christoffel_1st(g)
assert gamma1[0, 0, 0] == 0
assert gamma1[0, 0, 1] == -y**(-3)
assert gamma1[0, 1, 0] == -y**(-3)
assert gamma1[0, 1, 1] == 0
assert gamma1[1, 1, 1] == -y**(-3)
assert gamma1[1, 1, 0] == 0
assert gamma1[1, 0, 1] == 0
assert gamma1[1, 0, 0] == y**(-3)
gamma2 = metric_to_Christoffel_2nd(g)
assert gamma2[0, 0, 0] == 0
assert gamma2[0, 0, 1] == -y**(-1)
assert gamma2[0, 1, 0] == -y**(-1)
assert gamma2[0, 1, 1] == 0
assert gamma2[1, 1, 1] == -y**(-1)
assert gamma2[1, 1, 0] == 0
assert gamma2[1, 0, 1] == 0
assert gamma2[1, 0, 0] == y**(-1)
Rm = metric_to_Riemann_components(g)
assert Rm[0, 0, 0, 0] == 0
assert Rm[0, 0, 0, 1] == 0
assert Rm[0, 0, 1, 0] == 0
assert Rm[0, 0, 1, 1] == 0
assert Rm[0, 1, 0, 0] == 0
assert Rm[0, 1, 0, 1] == -y**(-2)
assert Rm[0, 1, 1, 0] == y**(-2)
assert Rm[0, 1, 1, 1] == 0
assert Rm[1, 0, 0, 0] == 0
assert Rm[1, 0, 0, 1] == y**(-2)
assert Rm[1, 0, 1, 0] == -y**(-2)
assert Rm[1, 0, 1, 1] == 0
assert Rm[1, 1, 0, 0] == 0
assert Rm[1, 1, 0, 1] == 0
assert Rm[1, 1, 1, 0] == 0
assert Rm[1, 1, 1, 1] == 0
Ric = metric_to_Ricci_components(g)
assert Ric[0, 0] == -y**(-2)
assert Ric[0, 1] == 0
assert Ric[1, 0] == 0
assert Ric[0, 0] == -y**(-2)
assert Ric == ImmutableDenseNDimArray([-y**(-2), 0, 0, -y**(-2)], (2, 2))
## scalar curvature is -2
#TODO - it would be nice to have index contraction built-in
R = (Ric[0, 0] + Ric[1, 1])*y**2
assert R == -2
## Gauss curvature is -1
assert R/2 == -1
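# --- Illustrative note (not part of the original test) ---
# In coordinates (x, y) with metric g = (dx**2 + dy**2)/y**2 the nonzero
# Christoffel symbols checked above are
#     Gamma^x_{xy} = Gamma^x_{yx} = -1/y,   Gamma^y_{xx} = 1/y,   Gamma^y_{yy} = -1/y,
# the Ricci tensor is Ric = diag(-1/y**2, -1/y**2), and the scalar curvature is
#     R = g^{ij} Ric_{ij} = y**2 * (Ric[0, 0] + Ric[1, 1]) = -2,
# i.e. constant Gauss curvature K = R/2 = -1, as asserted above.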
| bsd-3-clause | 823,497,478,222,230,900 | 27.384615 | 85 | 0.538521 | false |
Lancey6/redwind | migrations/20141130-eliminate-duplicate-tags.py | 3 | 2273 | """
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
| bsd-2-clause | -4,518,706,428,343,623,000 | 27.4125 | 79 | 0.547734 | false |
mega-force/osmc | package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x078.py | 252 | 4648 | data = (
'Dang ', # 0x00
'Ma ', # 0x01
'Sha ', # 0x02
'Dan ', # 0x03
'Jue ', # 0x04
'Li ', # 0x05
'Fu ', # 0x06
'Min ', # 0x07
'Nuo ', # 0x08
'Huo ', # 0x09
'Kang ', # 0x0a
'Zhi ', # 0x0b
'Qi ', # 0x0c
'Kan ', # 0x0d
'Jie ', # 0x0e
'Fen ', # 0x0f
'E ', # 0x10
'Ya ', # 0x11
'Pi ', # 0x12
'Zhe ', # 0x13
'Yan ', # 0x14
'Sui ', # 0x15
'Zhuan ', # 0x16
'Che ', # 0x17
'Dun ', # 0x18
'Pan ', # 0x19
'Yan ', # 0x1a
'[?] ', # 0x1b
'Feng ', # 0x1c
'Fa ', # 0x1d
'Mo ', # 0x1e
'Zha ', # 0x1f
'Qu ', # 0x20
'Yu ', # 0x21
'Luo ', # 0x22
'Tuo ', # 0x23
'Tuo ', # 0x24
'Di ', # 0x25
'Zhai ', # 0x26
'Zhen ', # 0x27
'Ai ', # 0x28
'Fei ', # 0x29
'Mu ', # 0x2a
'Zhu ', # 0x2b
'Li ', # 0x2c
'Bian ', # 0x2d
'Nu ', # 0x2e
'Ping ', # 0x2f
'Peng ', # 0x30
'Ling ', # 0x31
'Pao ', # 0x32
'Le ', # 0x33
'Po ', # 0x34
'Bo ', # 0x35
'Po ', # 0x36
'Shen ', # 0x37
'Za ', # 0x38
'Nuo ', # 0x39
'Li ', # 0x3a
'Long ', # 0x3b
'Tong ', # 0x3c
'[?] ', # 0x3d
'Li ', # 0x3e
'Aragane ', # 0x3f
'Chu ', # 0x40
'Keng ', # 0x41
'Quan ', # 0x42
'Zhu ', # 0x43
'Kuang ', # 0x44
'Huo ', # 0x45
'E ', # 0x46
'Nao ', # 0x47
'Jia ', # 0x48
'Lu ', # 0x49
'Wei ', # 0x4a
'Ai ', # 0x4b
'Luo ', # 0x4c
'Ken ', # 0x4d
'Xing ', # 0x4e
'Yan ', # 0x4f
'Tong ', # 0x50
'Peng ', # 0x51
'Xi ', # 0x52
'[?] ', # 0x53
'Hong ', # 0x54
'Shuo ', # 0x55
'Xia ', # 0x56
'Qiao ', # 0x57
'[?] ', # 0x58
'Wei ', # 0x59
'Qiao ', # 0x5a
'[?] ', # 0x5b
'Keng ', # 0x5c
'Xiao ', # 0x5d
'Que ', # 0x5e
'Chan ', # 0x5f
'Lang ', # 0x60
'Hong ', # 0x61
'Yu ', # 0x62
'Xiao ', # 0x63
'Xia ', # 0x64
'Mang ', # 0x65
'Long ', # 0x66
'Iong ', # 0x67
'Che ', # 0x68
'Che ', # 0x69
'E ', # 0x6a
'Liu ', # 0x6b
'Ying ', # 0x6c
'Mang ', # 0x6d
'Que ', # 0x6e
'Yan ', # 0x6f
'Sha ', # 0x70
'Kun ', # 0x71
'Yu ', # 0x72
'[?] ', # 0x73
'Kaki ', # 0x74
'Lu ', # 0x75
'Chen ', # 0x76
'Jian ', # 0x77
'Nue ', # 0x78
'Song ', # 0x79
'Zhuo ', # 0x7a
'Keng ', # 0x7b
'Peng ', # 0x7c
'Yan ', # 0x7d
'Zhui ', # 0x7e
'Kong ', # 0x7f
'Ceng ', # 0x80
'Qi ', # 0x81
'Zong ', # 0x82
'Qing ', # 0x83
'Lin ', # 0x84
'Jun ', # 0x85
'Bo ', # 0x86
'Ding ', # 0x87
'Min ', # 0x88
'Diao ', # 0x89
'Jian ', # 0x8a
'He ', # 0x8b
'Lu ', # 0x8c
'Ai ', # 0x8d
'Sui ', # 0x8e
'Que ', # 0x8f
'Ling ', # 0x90
'Bei ', # 0x91
'Yin ', # 0x92
'Dui ', # 0x93
'Wu ', # 0x94
'Qi ', # 0x95
'Lun ', # 0x96
'Wan ', # 0x97
'Dian ', # 0x98
'Gang ', # 0x99
'Pei ', # 0x9a
'Qi ', # 0x9b
'Chen ', # 0x9c
'Ruan ', # 0x9d
'Yan ', # 0x9e
'Die ', # 0x9f
'Ding ', # 0xa0
'Du ', # 0xa1
'Tuo ', # 0xa2
'Jie ', # 0xa3
'Ying ', # 0xa4
'Bian ', # 0xa5
'Ke ', # 0xa6
'Bi ', # 0xa7
'Wei ', # 0xa8
'Shuo ', # 0xa9
'Zhen ', # 0xaa
'Duan ', # 0xab
'Xia ', # 0xac
'Dang ', # 0xad
'Ti ', # 0xae
'Nao ', # 0xaf
'Peng ', # 0xb0
'Jian ', # 0xb1
'Di ', # 0xb2
'Tan ', # 0xb3
'Cha ', # 0xb4
'Seki ', # 0xb5
'Qi ', # 0xb6
'[?] ', # 0xb7
'Feng ', # 0xb8
'Xuan ', # 0xb9
'Que ', # 0xba
'Que ', # 0xbb
'Ma ', # 0xbc
'Gong ', # 0xbd
'Nian ', # 0xbe
'Su ', # 0xbf
'E ', # 0xc0
'Ci ', # 0xc1
'Liu ', # 0xc2
'Si ', # 0xc3
'Tang ', # 0xc4
'Bang ', # 0xc5
'Hua ', # 0xc6
'Pi ', # 0xc7
'Wei ', # 0xc8
'Sang ', # 0xc9
'Lei ', # 0xca
'Cuo ', # 0xcb
'Zhen ', # 0xcc
'Xia ', # 0xcd
'Qi ', # 0xce
'Lian ', # 0xcf
'Pan ', # 0xd0
'Wei ', # 0xd1
'Yun ', # 0xd2
'Dui ', # 0xd3
'Zhe ', # 0xd4
'Ke ', # 0xd5
'La ', # 0xd6
'[?] ', # 0xd7
'Qing ', # 0xd8
'Gun ', # 0xd9
'Zhuan ', # 0xda
'Chan ', # 0xdb
'Qi ', # 0xdc
'Ao ', # 0xdd
'Peng ', # 0xde
'Lu ', # 0xdf
'Lu ', # 0xe0
'Kan ', # 0xe1
'Qiang ', # 0xe2
'Chen ', # 0xe3
'Yin ', # 0xe4
'Lei ', # 0xe5
'Biao ', # 0xe6
'Qi ', # 0xe7
'Mo ', # 0xe8
'Qi ', # 0xe9
'Cui ', # 0xea
'Zong ', # 0xeb
'Qing ', # 0xec
'Chuo ', # 0xed
'[?] ', # 0xee
'Ji ', # 0xef
'Shan ', # 0xf0
'Lao ', # 0xf1
'Qu ', # 0xf2
'Zeng ', # 0xf3
'Deng ', # 0xf4
'Jian ', # 0xf5
'Xi ', # 0xf6
'Lin ', # 0xf7
'Ding ', # 0xf8
'Dian ', # 0xf9
'Huang ', # 0xfa
'Pan ', # 0xfb
'Za ', # 0xfc
'Qiao ', # 0xfd
'Di ', # 0xfe
'Li ', # 0xff
)
| gpl-2.0 | 8,636,432,027,483,705,000 | 17.015504 | 21 | 0.386833 | false |
voodka/ghostbakup | node_modules/testem/node_modules/socket.io/node_modules/engine.io/node_modules/engine.io-parser/node_modules/utf8/tests/generate-test-data.py | 1788 | 1435 | #!/usr/bin/env python
import re
import json
# https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
def unisymbol(codePoint):
if codePoint >= 0x0000 and codePoint <= 0xFFFF:
return unichr(codePoint)
elif codePoint >= 0x010000 and codePoint <= 0x10FFFF:
highSurrogate = int((codePoint - 0x10000) / 0x400) + 0xD800
lowSurrogate = int((codePoint - 0x10000) % 0x400) + 0xDC00
return unichr(highSurrogate) + unichr(lowSurrogate)
else:
return 'Error'
def hexify(codePoint):
return 'U+' + hex(codePoint)[2:].upper().zfill(6)
def writeFile(filename, contents):
print filename
with open(filename, 'w') as f:
f.write(contents.strip() + '\n')
data = []
for codePoint in range(0x000000, 0x10FFFF + 1):
# Skip non-scalar values.
if codePoint >= 0xD800 and codePoint <= 0xDFFF:
continue
symbol = unisymbol(codePoint)
# http://stackoverflow.com/a/17199950/96656
bytes = symbol.encode('utf8').decode('latin1')
data.append({
'codePoint': codePoint,
'decoded': symbol,
'encoded': bytes
});
jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': '))
# Use tabs instead of double spaces for indentation
jsonData = jsonData.replace(' ', '\t')
# Escape hexadecimal digits in escape sequences
jsonData = re.sub(
r'\\u([a-fA-F0-9]{4})',
lambda match: r'\u{}'.format(match.group(1).upper()),
jsonData
)
writeFile('data.json', jsonData)
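# --- Illustrative note (not part of the original script) ---
# Each entry written to data.json has the shape
#   {"codePoint": 233, "decoded": "\u00E9", "encoded": "\u00C3\u00A9"}
# i.e. the scalar value, the symbol itself, and its UTF-8 bytes re-read as
# latin-1 (U+00E9 is shown here purely as an example).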
| mit | 3,253,380,593,608,419,000 | 27.7 | 78 | 0.70453 | false |
sbc100/native_client | src/trusted/validator_ragel/PRESUBMIT.py | 12 | 4775 | # Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Documentation on PRESUBMIT.py can be found at:
# http://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
import json
import hashlib
import os
import re
import gclient_utils
def CheckChange(input_api, message_constructor):
"""Checks for files with a modified contents.
Some checking of validator happens on builbots, but comprehensive enumeration
tests must be run locally.
There are two dangers:
1. Source code for autogenerated files can be modified without regeneration
of said files.
2. Source of validator can be changed without running the aforementioned
tests.
This function catches the situation when source files for validator_x86_??.c
are changed but files are not regenerated and it also catches the situation
when code is changed without running the dfacheckvalidator tests.
"""
errors = []
changelist = input_api.change
root_path = changelist.RepositoryRoot()
if input_api.change.scm == 'svn':
try:
# With SVN you can decide to commit not all modified files but some of
# them thus separate GetAllModifiedFiles() and GetModifiedFiles() lists
# are provided. We need to remove root_path from the name of file.
assert all(filename.startswith(root_path + os.path.sep)
for filename in changelist.GetAllModifiedFiles())
all_filenames = [filename[len(root_path + os.path.sep):]
for filename in changelist.GetAllModifiedFiles()]
assert all(filename.startswith(root_path + os.path.sep)
for filename in changelist.GetModifiedFiles())
modified_filenames = [filename[len(root_path + os.path.sep):]
for filename in changelist.GetModifiedFiles()]
except:
# If gcl is not available (which happens in CQ bots) then we'll try to use
# AffectedFiles() instead of GetAllModifiedFiles()
all_filenames = [file.LocalPath() for file in changelist.AffectedFiles()]
modified_filenames = all_filenames
else:
# With GIT you must commit all modified files thus only AffectedFiles()
# list is provided.
all_filenames = [file.LocalPath() for file in changelist.AffectedFiles()]
modified_filenames = all_filenames
json_filename = os.path.join(
'src', 'trusted', 'validator_ragel', 'gen', 'protected_files.json')
protected_files = json.loads(
gclient_utils.FileRead(os.path.join(root_path, json_filename)))
need_dfagen = False
need_dfacheckvalidator = False
canonical_prefix = 'native_client/'
for filename in sorted(all_filenames):
canonical_filename = canonical_prefix + filename.replace('\\', '/')
if canonical_filename in protected_files['validator']:
file_contents = gclient_utils.FileRead(os.path.join(root_path, filename))
sha512 = hashlib.sha512(file_contents).hexdigest()
if sha512 != protected_files['validator'][canonical_filename]:
errors.append(message_constructor(
'Incorrect {0} hash:\n expected {1}\n got {2}'.format(
canonical_filename,
protected_files['validator'][canonical_filename],
sha512)))
need_dfacheckvalidator = True
if canonical_filename in protected_files['generating']:
for automaton_filename in protected_files['generated']:
if (os.stat(os.path.join(root_path, filename)).st_mtime >
os.stat(os.path.join(root_path,
automaton_filename[len(canonical_prefix):])).st_mtime):
errors.append(message_constructor(
              'File {0} is older than {1}'.format(
automaton_filename, canonical_filename)))
need_dfagen = True
if (canonical_filename in protected_files['validator'] or
canonical_filename in protected_files['generating'] or
filename == json_filename):
if filename not in modified_filenames:
errors.append(message_constructor(
'File {0} is changed but is excluded from this CL'.format(
canonical_filename)))
if need_dfagen:
errors.append(message_constructor(
'Please run "./scons dfagen" before commit!'))
if need_dfacheckvalidator:
errors.append(message_constructor(
'Please run "./scons dfacheckvalidator" before commit!'))
return errors
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api,
message_constructor=output_api.PresubmitPromptWarning)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api,
message_constructor=output_api.PresubmitError)
| bsd-3-clause | -7,773,188,843,427,686,000 | 38.791667 | 80 | 0.687539 | false |
tudorbarascu/QGIS | tests/src/python/test_qgsscalewidget.py | 15 | 2940 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsScaleWidget
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '13/03/2019'
__copyright__ = 'Copyright 2019, The QGIS Project'
import qgis # NOQA
import math
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtTest import QSignalSpy
from qgis.gui import QgsScaleWidget
from qgis.testing import start_app, unittest
start_app()
class TestQgsScaleWidget(unittest.TestCase):
def testBasic(self):
w = QgsScaleWidget()
spy = QSignalSpy(w.scaleChanged)
w.setScaleString('1:2345')
self.assertEqual(w.scaleString(), '1:2,345')
self.assertEqual(w.scale(), 2345)
self.assertEqual(len(spy), 1)
self.assertEqual(spy[-1][0], 2345)
w.setScaleString('0.02')
self.assertEqual(w.scaleString(), '1:50')
self.assertEqual(w.scale(), 50)
self.assertEqual(len(spy), 2)
self.assertEqual(spy[-1][0], 50)
w.setScaleString('1:4,000')
self.assertEqual(w.scaleString(), '1:4,000')
self.assertEqual(w.scale(), 4000)
self.assertEqual(len(spy), 3)
self.assertEqual(spy[-1][0], 4000)
def testNull(self):
w = QgsScaleWidget()
w.setScale(50)
self.assertFalse(w.allowNull())
w.setNull() # no effect
self.assertEqual(w.scale(), 50.0)
self.assertFalse(w.isNull())
spy = QSignalSpy(w.scaleChanged)
w.setAllowNull(True)
self.assertTrue(w.allowNull())
w.setScaleString('')
self.assertEqual(len(spy), 1)
self.assertTrue(math.isnan(w.scale()))
self.assertTrue(math.isnan(spy[-1][0]))
self.assertTrue(w.isNull())
w.setScaleString(" ")
self.assertTrue(math.isnan(w.scale()))
self.assertTrue(w.isNull())
w.setScaleString('0.02')
self.assertEqual(w.scale(), 50.0)
self.assertEqual(len(spy), 2)
self.assertEqual(spy[-1][0], 50.0)
self.assertFalse(w.isNull())
w.setScaleString('')
self.assertTrue(math.isnan(w.scale()))
self.assertEqual(len(spy), 3)
self.assertTrue(math.isnan(spy[-1][0]))
self.assertTrue(w.isNull())
w.setScaleString('0.02')
self.assertEqual(w.scale(), 50.0)
self.assertEqual(len(spy), 4)
self.assertEqual(spy[-1][0], 50.0)
self.assertFalse(w.isNull())
w.setNull()
self.assertTrue(math.isnan(w.scale()))
self.assertEqual(len(spy), 5)
self.assertTrue(math.isnan(spy[-1][0]))
self.assertTrue(w.isNull())
w.setAllowNull(False)
self.assertFalse(w.allowNull())
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 8,558,736,179,206,132,000 | 29.309278 | 78 | 0.611905 | false |
aferr/TemporalPartitioningMemCtl | tests/configs/o3-timing.py | 14 | 2408 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
import m5
from m5.objects import *
m5.util.addToPath('../configs/common')
class MyCache(BaseCache):
assoc = 2
block_size = 64
latency = '1ns'
mshrs = 10
tgts_per_mshr = 5
class MyL1Cache(MyCache):
is_top_level = True
tgts_per_mshr = 20
cpu = DerivO3CPU(cpu_id=0)
cpu.addTwoLevelCacheHierarchy(MyL1Cache(size = '128kB'),
MyL1Cache(size = '256kB'),
MyCache(size = '2MB'))
cpu.clock = '2GHz'
system = System(cpu = cpu,
physmem = SimpleMemory(),
membus = CoherentBus())
system.system_port = system.membus.slave
system.physmem.port = system.membus.master
# create the interrupt controller
cpu.createInterruptController()
cpu.connectAllPorts(system.membus)
root = Root(full_system = False, system = system)
| bsd-3-clause | -3,542,440,669,898,528,300 | 39.813559 | 72 | 0.739203 | false |
jirikuncar/invenio | invenio/testsuite/test_ext_email.py | 16 | 11033 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Unit tests for the miscutil/mailutils module.
"""
import os
import sys
import pkg_resources
from base64 import encodestring
from six import iteritems, StringIO
from flask import current_app
from invenio.ext.email import send_email
from invenio.testsuite import make_test_suite, run_test_suite, InvenioTestCase
class MailTestCase(InvenioTestCase):
EMAIL_BACKEND = 'flask_email.backends.console.Mail'
def setUp(self):
super(MailTestCase, self).setUp()
current_app.config['EMAIL_BACKEND'] = self.EMAIL_BACKEND
self.__stdout = sys.stdout
self.stream = sys.stdout = StringIO()
def tearDown(self):
del self.stream
sys.stdout = self.__stdout
del self.__stdout
super(MailTestCase, self).tearDown()
def flush_mailbox(self):
self.stream = sys.stdout = StringIO()
#def get_mailbox_content(self):
# messages = self.stream.getvalue().split('\n' + ('-' * 79) + '\n')
# return [message_from_string(m) for m in messages if m]
class TestMailUtils(MailTestCase):
"""
mailutils TestSuite.
"""
def test_console_send_email(self):
"""
Test that the console backend can be pointed at an arbitrary stream.
"""
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: Subject
From: [email protected]
To: [email protected]"""
send_email('[email protected]', ['[email protected]'], subject='Subject',
content='Content')
self.assertIn(msg_content, sys.stdout.getvalue())
self.flush_mailbox()
send_email('[email protected]', '[email protected]', subject='Subject',
content='Content')
self.assertIn(msg_content, sys.stdout.getvalue())
self.flush_mailbox()
def test_email_text_template(self):
"""
Test email text template engine.
"""
from invenio.ext.template import render_template_to_string
contexts = {
'ctx1': {'content': 'Content 1'},
'ctx2': {'content': 'Content 2', 'header': 'Header 2'},
'ctx3': {'content': 'Content 3', 'footer': 'Footer 3'},
'ctx4': {'content': 'Content 4', 'header': 'Header 4', 'footer': 'Footer 4'}
}
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: %s
From: [email protected]
To: [email protected]"""
for name, ctx in iteritems(contexts):
msg = render_template_to_string('mail_text.tpl', **ctx)
send_email('[email protected]', ['[email protected]'], subject=name,
**ctx)
email = sys.stdout.getvalue()
self.assertIn(msg_content % name, email)
self.assertIn(msg, email)
self.flush_mailbox()
def test_email_html_template(self):
"""
Test email html template engine.
"""
from invenio.ext.template import render_template_to_string
contexts = {
'ctx1': {'html_content': '<b>Content 1</b>'},
'ctx2': {'html_content': '<b>Content 2</b>',
'html_header': '<h1>Header 2</h1>'},
'ctx3': {'html_content': '<b>Content 3</b>',
'html_footer': '<i>Footer 3</i>'},
'ctx4': {'html_content': '<b>Content 4</b>',
'html_header': '<h1>Header 4</h1>',
'html_footer': '<i>Footer 4</i>'}
}
def strip_html_key(ctx):
return dict(map(lambda (k, v): (k[5:], v), iteritems(ctx)))
for name, ctx in iteritems(contexts):
msg = render_template_to_string('mail_html.tpl',
**strip_html_key(ctx))
send_email('[email protected]', ['[email protected]'], subject=name,
content='Content Text', **ctx)
email = sys.stdout.getvalue()
self.assertIn('Content-Type: multipart/alternative;', email)
self.assertIn('Content Text', email)
self.assertIn(msg, email)
self.flush_mailbox()
def test_email_html_image(self):
"""
Test sending html message with an image.
"""
html_images = {
'img1': pkg_resources.resource_filename(
'invenio.base',
os.path.join('static', 'img', 'journal_water_dog.gif')
)
}
send_email('[email protected]', ['[email protected]'],
subject='Subject', content='Content Text',
html_content='<img src="cid:img1"/>',
html_images=html_images)
email = sys.stdout.getvalue()
self.assertIn('Content Text', email)
self.assertIn('<img src="cid:img1"/>', email)
with open(html_images['img1'], 'r') as f:
self.assertIn(encodestring(f.read()), email)
self.flush_mailbox()
def test_sending_attachment(self):
"""
Test sending email with an attachment.
"""
attachments = [
pkg_resources.resource_filename(
'invenio.base',
os.path.join('static', 'img', 'journal_header.png')
)
]
send_email('[email protected]', ['[email protected]'],
subject='Subject', content='Content Text',
attachments=attachments)
email = sys.stdout.getvalue()
self.assertIn('Content Text', email)
        # First attachment is image/png
self.assertIn('Content-Type: image/png', email)
for attachment in attachments:
with open(attachment, 'r') as f:
self.assertIn(encodestring(f.read()), email)
self.flush_mailbox()
def test_single_recipient(self):
"""
Test that the email receivers are hidden.
"""
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: Subject
From: [email protected]
To: [email protected]"""
send_email('[email protected]', ['[email protected]'],
subject='Subject', content='Content')
email = sys.stdout.getvalue()
self.assertIn(msg_content, email)
self.flush_mailbox()
send_email('[email protected]', '[email protected]',
subject='Subject', content='Content')
email = sys.stdout.getvalue()
self.assertIn(msg_content, email)
self.flush_mailbox()
def test_bbc_undisclosed_recipients(self):
"""
Test that the email receivers are hidden.
"""
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: Subject
From: [email protected]
To: Undisclosed.Recipients:"""
send_email('[email protected]', ['[email protected]', '[email protected]'],
subject='Subject', content='Content')
email = sys.stdout.getvalue()
self.assertIn(msg_content, email)
self.assertNotIn('Bcc: [email protected],[email protected]', email)
self.flush_mailbox()
send_email('[email protected]', '[email protected], [email protected]',
subject='Subject', content='Content')
email = sys.stdout.getvalue()
self.assertIn(msg_content, email)
self.assertNotIn('Bcc: [email protected],[email protected]', email)
self.flush_mailbox()
class TestAdminMailBackend(MailTestCase):
EMAIL_BACKEND = 'invenio.ext.email.backends.console_adminonly.Mail'
ADMIN_MESSAGE = "This message would have been sent to the following recipients"
def test_simple_email_header(self):
"""
Test simple email header.
"""
from invenio.config import CFG_SITE_ADMIN_EMAIL
from invenio.ext.template import render_template_to_string
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: Subject
From: [email protected]
To: %s""" % (CFG_SITE_ADMIN_EMAIL, )
msg = render_template_to_string('mail_text.tpl', content='Content')
self.flush_mailbox()
send_email('[email protected]', ['[email protected]'], subject='Subject',
content='Content')
email = self.stream.getvalue()
self.assertIn(msg_content, email)
self.assertIn(self.ADMIN_MESSAGE, email)
self.assertNotIn('Bcc:', email)
self.assertIn(msg, email)
self.flush_mailbox()
send_email('[email protected]', '[email protected]', subject='Subject',
content='Content')
email = self.stream.getvalue()
self.assertIn(msg_content, email)
self.assertIn(self.ADMIN_MESSAGE, email)
self.assertNotIn('Bcc:', email)
self.assertIn(msg, email)
self.flush_mailbox()
def test_cc_bcc_headers(self):
"""
Test that no Cc and Bcc headers are sent.
"""
from invenio.config import CFG_SITE_ADMIN_EMAIL
msg_content = """Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: Subject
From: [email protected]
To: %s""" % (CFG_SITE_ADMIN_EMAIL, )
send_email('[email protected]', ['[email protected]', '[email protected]'],
subject='Subject', content='Content')
email = self.stream.getvalue()
self.assertIn(msg_content, email)
self.assertIn(self.ADMIN_MESSAGE, email)
self.assertIn('[email protected],[email protected]', email)
self.assertNotIn('Bcc: [email protected],[email protected]', email)
self.flush_mailbox()
send_email('[email protected]', '[email protected], [email protected]',
subject='Subject', content='Content')
email = self.stream.getvalue()
self.assertIn(msg_content, email)
self.assertIn(self.ADMIN_MESSAGE, email)
self.assertIn('[email protected],[email protected]', email)
self.assertNotIn('Bcc: [email protected],[email protected]', email)
self.flush_mailbox()
TEST_SUITE = make_test_suite(TestMailUtils, TestAdminMailBackend)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
| gpl-2.0 | 1,295,170,862,831,917,800 | 34.249201 | 88 | 0.597662 | false |
sholtebeck/knarflog | lib/werkzeug/formparser.py | 3 | 21790 | # -*- coding: utf-8 -*-
"""
werkzeug.formparser
~~~~~~~~~~~~~~~~~~~
This module implements the form parsing. It supports url-encoded forms
as well as non-nested multipart uploads.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import codecs
import re
from functools import update_wrapper
from itertools import chain
from itertools import repeat
from itertools import tee
from ._compat import BytesIO
from ._compat import text_type
from ._compat import to_native
from .datastructures import FileStorage
from .datastructures import Headers
from .datastructures import MultiDict
from .http import parse_options_header
from .urls import url_decode_stream
from .wsgi import get_content_length
from .wsgi import get_input_stream
from .wsgi import make_line_iter
# there are some platforms where SpooledTemporaryFile is not available.
# In that case we need to provide a fallback.
try:
from tempfile import SpooledTemporaryFile
except ImportError:
from tempfile import TemporaryFile
SpooledTemporaryFile = None
#: an iterator that yields empty strings
_empty_string_iter = repeat("")
#: a regular expression for multipart boundaries
_multipart_boundary_re = re.compile("^[ -~]{0,200}[!-~]$")
#: supported http encodings that are also available in python we support
#: for multipart messages.
_supported_multipart_encodings = frozenset(["base64", "quoted-printable"])
def default_stream_factory(
total_content_length, filename, content_type, content_length=None
):
"""The stream factory that is used per default."""
max_size = 1024 * 500
if SpooledTemporaryFile is not None:
return SpooledTemporaryFile(max_size=max_size, mode="wb+")
if total_content_length is None or total_content_length > max_size:
return TemporaryFile("wb+")
return BytesIO()
def parse_form_data(
environ,
stream_factory=None,
charset="utf-8",
errors="replace",
max_form_memory_size=None,
max_content_length=None,
cls=None,
silent=True,
):
"""Parse the form data in the environ and return it as tuple in the form
``(stream, form, files)``. You should only call this method if the
transport method is `POST`, `PUT`, or `PATCH`.
If the mimetype of the data transmitted is `multipart/form-data` the
files multidict will be filled with `FileStorage` objects. If the
mimetype is unknown the input stream is wrapped and returned as first
argument, else the stream is empty.
This is a shortcut for the common usage of :class:`FormDataParser`.
Have a look at :ref:`dealing-with-request-data` for more details.
.. versionadded:: 0.5
The `max_form_memory_size`, `max_content_length` and
`cls` parameters were added.
.. versionadded:: 0.5.1
The optional `silent` flag was added.
:param environ: the WSGI environment to be used for parsing.
:param stream_factory: An optional callable that returns a new read and
writeable file descriptor. This callable works
the same as :meth:`~BaseResponse._get_file_stream`.
:param charset: The character set for URL and url encoded form data.
:param errors: The encoding error behavior.
:param max_form_memory_size: the maximum number of bytes to be accepted for
in-memory stored form data. If the data
exceeds the value specified an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param max_content_length: If this is provided and the transmitted data
is longer than this value an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
:param silent: If set to False parsing errors will not be caught.
:return: A tuple in the form ``(stream, form, files)``.
"""
return FormDataParser(
stream_factory,
charset,
errors,
max_form_memory_size,
max_content_length,
cls,
silent,
).parse_from_environ(environ)
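# Minimal usage sketch for parse_form_data() inside a WSGI callable; the app
# function, field names and response plumbing below are illustrative only:
#
#   def application(environ, start_response):
#       if environ["REQUEST_METHOD"] in ("POST", "PUT", "PATCH"):
#           stream, form, files = parse_form_data(environ)
#           title = form.get("title")            # text fields (MultiDict)
#           upload = files.get("attachment")     # FileStorage for uploads
#       start_response("200 OK", [("Content-Type", "text/plain")])
#       return [b"ok"]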
def exhaust_stream(f):
"""Helper decorator for methods that exhausts the stream on return."""
def wrapper(self, stream, *args, **kwargs):
try:
return f(self, stream, *args, **kwargs)
finally:
exhaust = getattr(stream, "exhaust", None)
if exhaust is not None:
exhaust()
else:
while 1:
chunk = stream.read(1024 * 64)
if not chunk:
break
return update_wrapper(wrapper, f)
class FormDataParser(object):
"""This class implements parsing of form data for Werkzeug. By itself
it can parse multipart and url encoded form data. It can be subclassed
and extended but for most mimetypes it is a better idea to use the
untouched stream and expose it as separate attributes on a request
object.
.. versionadded:: 0.8
:param stream_factory: An optional callable that returns a new read and
writeable file descriptor. This callable works
the same as :meth:`~BaseResponse._get_file_stream`.
:param charset: The character set for URL and url encoded form data.
:param errors: The encoding error behavior.
:param max_form_memory_size: the maximum number of bytes to be accepted for
in-memory stored form data. If the data
exceeds the value specified an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param max_content_length: If this is provided and the transmitted data
is longer than this value an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
:param silent: If set to False parsing errors will not be caught.
"""
def __init__(
self,
stream_factory=None,
charset="utf-8",
errors="replace",
max_form_memory_size=None,
max_content_length=None,
cls=None,
silent=True,
):
if stream_factory is None:
stream_factory = default_stream_factory
self.stream_factory = stream_factory
self.charset = charset
self.errors = errors
self.max_form_memory_size = max_form_memory_size
self.max_content_length = max_content_length
if cls is None:
cls = MultiDict
self.cls = cls
self.silent = silent
def get_parse_func(self, mimetype, options):
return self.parse_functions.get(mimetype)
def parse_from_environ(self, environ):
"""Parses the information from the environment as form data.
:param environ: the WSGI environment to be used for parsing.
:return: A tuple in the form ``(stream, form, files)``.
"""
content_type = environ.get("CONTENT_TYPE", "")
content_length = get_content_length(environ)
mimetype, options = parse_options_header(content_type)
return self.parse(get_input_stream(environ), mimetype, content_length, options)
def parse(self, stream, mimetype, content_length, options=None):
"""Parses the information from the given stream, mimetype,
content length and mimetype parameters.
:param stream: an input stream
:param mimetype: the mimetype of the data
:param content_length: the content length of the incoming data
:param options: optional mimetype parameters (used for
the multipart boundary for instance)
:return: A tuple in the form ``(stream, form, files)``.
"""
if (
self.max_content_length is not None
and content_length is not None
and content_length > self.max_content_length
):
raise exceptions.RequestEntityTooLarge()
if options is None:
options = {}
parse_func = self.get_parse_func(mimetype, options)
if parse_func is not None:
try:
return parse_func(self, stream, mimetype, content_length, options)
except ValueError:
if not self.silent:
raise
return stream, self.cls(), self.cls()
@exhaust_stream
def _parse_multipart(self, stream, mimetype, content_length, options):
parser = MultiPartParser(
self.stream_factory,
self.charset,
self.errors,
max_form_memory_size=self.max_form_memory_size,
cls=self.cls,
)
boundary = options.get("boundary")
if boundary is None:
raise ValueError("Missing boundary")
if isinstance(boundary, text_type):
boundary = boundary.encode("ascii")
form, files = parser.parse(stream, boundary, content_length)
return stream, form, files
@exhaust_stream
def _parse_urlencoded(self, stream, mimetype, content_length, options):
if (
self.max_form_memory_size is not None
and content_length is not None
and content_length > self.max_form_memory_size
):
raise exceptions.RequestEntityTooLarge()
form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls)
return stream, form, self.cls()
#: mapping of mimetypes to parsing functions
parse_functions = {
"multipart/form-data": _parse_multipart,
"application/x-www-form-urlencoded": _parse_urlencoded,
"application/x-url-encoded": _parse_urlencoded,
}
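# Rough sketch of driving FormDataParser directly rather than through
# parse_form_data(); the payload and mimetype below are assumptions for the
# example, not part of the class API:
#
#   from io import BytesIO
#   parser = FormDataParser(max_form_memory_size=1024 * 1024)
#   stream, form, files = parser.parse(
#       BytesIO(b"a=1&b=2"), "application/x-www-form-urlencoded", 7)
#   # form should now behave like MultiDict([('a', '1'), ('b', '2')])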
def is_valid_multipart_boundary(boundary):
"""Checks if the string given is a valid multipart boundary."""
return _multipart_boundary_re.match(boundary) is not None
def _line_parse(line):
"""Removes line ending characters and returns a tuple (`stripped_line`,
`is_terminated`).
"""
if line[-2:] in ["\r\n", b"\r\n"]:
return line[:-2], True
elif line[-1:] in ["\r", "\n", b"\r", b"\n"]:
return line[:-1], True
return line, False
def parse_multipart_headers(iterable):
"""Parses multipart headers from an iterable that yields lines (including
the trailing newline symbol). The iterable has to be newline terminated.
The iterable will stop at the line where the headers ended so it can be
further consumed.
:param iterable: iterable of strings that are newline terminated
"""
result = []
for line in iterable:
line = to_native(line)
line, line_terminated = _line_parse(line)
if not line_terminated:
raise ValueError("unexpected end of line in multipart header")
if not line:
break
elif line[0] in " \t" and result:
key, value = result[-1]
result[-1] = (key, value + "\n " + line[1:])
else:
parts = line.split(":", 1)
if len(parts) == 2:
result.append((parts[0].strip(), parts[1].strip()))
# we link the list to the headers, no need to create a copy, the
# list was not shared anyways.
return Headers(result)
_begin_form = "begin_form"
_begin_file = "begin_file"
_cont = "cont"
_end = "end"
class MultiPartParser(object):
def __init__(
self,
stream_factory=None,
charset="utf-8",
errors="replace",
max_form_memory_size=None,
cls=None,
buffer_size=64 * 1024,
):
self.charset = charset
self.errors = errors
self.max_form_memory_size = max_form_memory_size
self.stream_factory = (
default_stream_factory if stream_factory is None else stream_factory
)
self.cls = MultiDict if cls is None else cls
# make sure the buffer size is divisible by four so that we can base64
# decode chunk by chunk
assert buffer_size % 4 == 0, "buffer size has to be divisible by 4"
# also the buffer size has to be at least 1024 bytes long or long headers
# will freak out the system
assert buffer_size >= 1024, "buffer size has to be at least 1KB"
self.buffer_size = buffer_size
def _fix_ie_filename(self, filename):
"""Internet Explorer 6 transmits the full file name if a file is
uploaded. This function strips the full path if it thinks the
filename is Windows-like absolute.
"""
if filename[1:3] == ":\\" or filename[:2] == "\\\\":
return filename.split("\\")[-1]
return filename
def _find_terminator(self, iterator):
"""The terminator might have some additional newlines before it.
There is at least one application that sends additional newlines
before headers (the python setuptools package).
"""
for line in iterator:
if not line:
break
line = line.strip()
if line:
return line
return b""
def fail(self, message):
raise ValueError(message)
def get_part_encoding(self, headers):
transfer_encoding = headers.get("content-transfer-encoding")
if (
transfer_encoding is not None
and transfer_encoding in _supported_multipart_encodings
):
return transfer_encoding
def get_part_charset(self, headers):
# Figure out input charset for current part
content_type = headers.get("content-type")
if content_type:
mimetype, ct_params = parse_options_header(content_type)
return ct_params.get("charset", self.charset)
return self.charset
def start_file_streaming(self, filename, headers, total_content_length):
if isinstance(filename, bytes):
filename = filename.decode(self.charset, self.errors)
filename = self._fix_ie_filename(filename)
content_type = headers.get("content-type")
try:
content_length = int(headers["content-length"])
except (KeyError, ValueError):
content_length = 0
container = self.stream_factory(
total_content_length=total_content_length,
filename=filename,
content_type=content_type,
content_length=content_length,
)
return filename, container
def in_memory_threshold_reached(self, bytes):
raise exceptions.RequestEntityTooLarge()
def validate_boundary(self, boundary):
if not boundary:
self.fail("Missing boundary")
if not is_valid_multipart_boundary(boundary):
self.fail("Invalid boundary: %s" % boundary)
if len(boundary) > self.buffer_size: # pragma: no cover
# this should never happen because we check for a minimum size
# of 1024 and boundaries may not be longer than 200. The only
# situation when this happens is for non debug builds where
# the assert is skipped.
self.fail("Boundary longer than buffer size")
def parse_lines(self, file, boundary, content_length, cap_at_buffer=True):
"""Generate parts of
``('begin_form', (headers, name))``
``('begin_file', (headers, name, filename))``
``('cont', bytestring)``
``('end', None)``
Always obeys the grammar
parts = ( begin_form cont* end |
begin_file cont* end )*
"""
next_part = b"--" + boundary
last_part = next_part + b"--"
iterator = chain(
make_line_iter(
file,
limit=content_length,
buffer_size=self.buffer_size,
cap_at_buffer=cap_at_buffer,
),
_empty_string_iter,
)
terminator = self._find_terminator(iterator)
if terminator == last_part:
return
elif terminator != next_part:
self.fail("Expected boundary at start of multipart data")
while terminator != last_part:
headers = parse_multipart_headers(iterator)
disposition = headers.get("content-disposition")
if disposition is None:
self.fail("Missing Content-Disposition header")
disposition, extra = parse_options_header(disposition)
transfer_encoding = self.get_part_encoding(headers)
name = extra.get("name")
filename = extra.get("filename")
# if no content type is given we stream into memory. A list is
# used as a temporary container.
if filename is None:
yield _begin_form, (headers, name)
# otherwise we parse the rest of the headers and ask the stream
# factory for something we can write in.
else:
yield _begin_file, (headers, name, filename)
buf = b""
for line in iterator:
if not line:
self.fail("unexpected end of stream")
if line[:2] == b"--":
terminator = line.rstrip()
if terminator in (next_part, last_part):
break
if transfer_encoding is not None:
if transfer_encoding == "base64":
transfer_encoding = "base64_codec"
try:
line = codecs.decode(line, transfer_encoding)
except Exception:
self.fail("could not decode transfer encoded chunk")
# we have something in the buffer from the last iteration.
# this is usually a newline delimiter.
if buf:
yield _cont, buf
buf = b""
                # If the line ends with windows CRLF we write everything except
                # the last two bytes. In all other cases however we write
                # everything except the last byte. If it was a newline, that's
                # fine, otherwise it does not matter because we will write it
                # out on the next iteration. This ensures we do not write the
                # final newline into the stream. That way we do not have to
                # truncate the stream. However we do have to make sure that
                # if something other than a newline is in there we write it
                # out.
                if line[-2:] == b"\r\n":
                    buf = b"\r\n"
                    cutoff = -2
                else:
                    buf = line[-1:]
                    cutoff = -1
                yield _cont, line[:cutoff]
else: # pragma: no cover
raise ValueError("unexpected end of part")
# if we have a leftover in the buffer that is not a newline
            # character we have to flush it, otherwise we will chop off
# certain values.
if buf not in (b"", b"\r", b"\n", b"\r\n"):
yield _cont, buf
yield _end, None
def parse_parts(self, file, boundary, content_length):
"""Generate ``('file', (name, val))`` and
``('form', (name, val))`` parts.
"""
in_memory = 0
for ellt, ell in self.parse_lines(file, boundary, content_length):
if ellt == _begin_file:
headers, name, filename = ell
is_file = True
guard_memory = False
filename, container = self.start_file_streaming(
filename, headers, content_length
)
_write = container.write
elif ellt == _begin_form:
headers, name = ell
is_file = False
container = []
_write = container.append
guard_memory = self.max_form_memory_size is not None
elif ellt == _cont:
_write(ell)
# if we write into memory and there is a memory size limit we
# count the number of bytes in memory and raise an exception if
# there is too much data in memory.
if guard_memory:
in_memory += len(ell)
if in_memory > self.max_form_memory_size:
self.in_memory_threshold_reached(in_memory)
elif ellt == _end:
if is_file:
container.seek(0)
yield (
"file",
(name, FileStorage(container, filename, name, headers=headers)),
)
else:
part_charset = self.get_part_charset(headers)
yield (
"form",
(name, b"".join(container).decode(part_charset, self.errors)),
)
def parse(self, file, boundary, content_length):
formstream, filestream = tee(
self.parse_parts(file, boundary, content_length), 2
)
form = (p[1] for p in formstream if p[0] == "form")
files = (p[1] for p in filestream if p[0] == "file")
return self.cls(form), self.cls(files)
from . import exceptions
| apache-2.0 | 4,256,815,946,803,647,000 | 36.1843 | 88 | 0.581184 | false |
apark263/tensorflow | tensorflow/contrib/summary/summary.py | 23 | 3781 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Summary API v2.
The operations in this package are safe to use with eager execution turned on or
off. It has a more flexible API that allows summaries to be written directly
from ops to places other than event log files, rather than propagating protos
from `tf.summary.merge_all` to `tf.summary.FileWriter`.
To use with eager execution enabled, write your code as follows:
```python
global_step = tf.train.get_or_create_global_step()
summary_writer = tf.contrib.summary.create_file_writer(
train_dir, flush_millis=10000)
with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():
# model code goes here
# and in it call
tf.contrib.summary.scalar("loss", my_loss)
# In this case every call to tf.contrib.summary.scalar will generate a record
# ...
```
To use it with graph execution, write your code as follows:
```python
global_step = tf.train.get_or_create_global_step()
summary_writer = tf.contrib.summary.create_file_writer(
train_dir, flush_millis=10000)
with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():
# model definition code goes here
# and in it call
tf.contrib.summary.scalar("loss", my_loss)
# In this case every call to tf.contrib.summary.scalar will generate an op,
# note the need to run tf.contrib.summary.all_summary_ops() to make sure these
# ops get executed.
# ...
train_op = ....
with tf.Session(...) as sess:
tf.global_variables_initializer().run()
tf.contrib.summary.initialize(graph=tf.get_default_graph())
# ...
while not_done_training:
sess.run([train_op, tf.contrib.summary.all_summary_ops()])
# ...
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.ops.summary_ops_v2 import all_summary_ops
from tensorflow.python.ops.summary_ops_v2 import always_record_summaries
from tensorflow.python.ops.summary_ops_v2 import audio
from tensorflow.python.ops.summary_ops_v2 import create_db_writer
from tensorflow.python.ops.summary_ops_v2 import create_file_writer
from tensorflow.python.ops.summary_ops_v2 import create_summary_file_writer
from tensorflow.python.ops.summary_ops_v2 import eval_dir
from tensorflow.python.ops.summary_ops_v2 import flush
from tensorflow.python.ops.summary_ops_v2 import generic
from tensorflow.python.ops.summary_ops_v2 import graph
from tensorflow.python.ops.summary_ops_v2 import histogram
from tensorflow.python.ops.summary_ops_v2 import image
from tensorflow.python.ops.summary_ops_v2 import import_event
from tensorflow.python.ops.summary_ops_v2 import initialize
from tensorflow.python.ops.summary_ops_v2 import never_record_summaries
from tensorflow.python.ops.summary_ops_v2 import record_summaries_every_n_global_steps
from tensorflow.python.ops.summary_ops_v2 import scalar
from tensorflow.python.ops.summary_ops_v2 import should_record_summaries
from tensorflow.python.ops.summary_ops_v2 import summary_writer_initializer_op
from tensorflow.python.ops.summary_ops_v2 import SummaryWriter
| apache-2.0 | 4,194,180,100,755,354,000 | 42.965116 | 86 | 0.757207 | false |
rbtcollins/pip | tests/lib/git_submodule_helpers.py | 58 | 2960 | from __future__ import absolute_import
import textwrap
def _create_test_package_submodule(env):
env.scratch_path.join("version_pkg_submodule").mkdir()
submodule_path = env.scratch_path / 'version_pkg_submodule'
env.run('touch', 'testfile', cwd=submodule_path)
env.run('git', 'init', cwd=submodule_path)
env.run('git', 'add', '.', cwd=submodule_path)
env.run('git', 'commit', '-q',
'--author', 'pip <[email protected]>',
'-am', 'initial version / submodule', cwd=submodule_path)
return submodule_path
def _change_test_package_submodule(env, submodule_path):
submodule_path.join("testfile").write("this is a changed file")
submodule_path.join("testfile2").write("this is an added file")
env.run('git', 'add', '.', cwd=submodule_path)
env.run('git', 'commit', '-q',
'--author', 'pip <[email protected]>',
'-am', 'submodule change', cwd=submodule_path)
def _pull_in_submodule_changes_to_module(env, module_path):
env.run(
'git',
'pull',
'-q',
'origin',
'master',
cwd=module_path / 'testpkg/static/',
)
env.run('git', 'commit', '-q',
'--author', 'pip <[email protected]>',
'-am', 'submodule change', cwd=module_path)
def _create_test_package_with_submodule(env):
env.scratch_path.join("version_pkg").mkdir()
version_pkg_path = env.scratch_path / 'version_pkg'
version_pkg_path.join("testpkg").mkdir()
pkg_path = version_pkg_path / 'testpkg'
pkg_path.join("__init__.py").write("# hello there")
pkg_path.join("version_pkg.py").write(textwrap.dedent('''\
def main():
print('0.1')
'''))
version_pkg_path.join("setup.py").write(textwrap.dedent('''\
from setuptools import setup, find_packages
setup(name='version_pkg',
version='0.1',
packages=find_packages(),
)
'''))
env.run('git', 'init', cwd=version_pkg_path, expect_error=True)
env.run('git', 'add', '.', cwd=version_pkg_path, expect_error=True)
env.run('git', 'commit', '-q',
'--author', 'pip <[email protected]>',
'-am', 'initial version', cwd=version_pkg_path,
expect_error=True)
submodule_path = _create_test_package_submodule(env)
env.run(
'git',
'submodule',
'add',
submodule_path,
'testpkg/static',
cwd=version_pkg_path,
expect_error=True,
)
env.run('git', 'commit', '-q',
'--author', 'pip <[email protected]>',
'-am', 'initial version w submodule', cwd=version_pkg_path,
expect_error=True)
return version_pkg_path, submodule_path
| mit | 4,581,578,188,407,214,000 | 35.097561 | 71 | 0.544595 | false |
vicky2135/lucious | oscar/lib/python2.7/site-packages/prompt_toolkit/token.py | 23 | 1420 | """
The Token class, interchangeable with ``pygments.token``.
A `Token` has some semantics for a piece of text that is given a style through
a :class:`~prompt_toolkit.styles.Style` class. A pygments lexer for instance,
returns a list of (Token, text) tuples. Each fragment of text has a token
assigned, which when combined with a style sheet, will determine the fine
style.
"""
# If we don't need any lexers or style classes from Pygments, we don't want
# Pygments to be installed for only the following 10 lines of code. So, there
# is some duplication, but this should stay compatible with Pygments.
__all__ = (
'Token',
'ZeroWidthEscape',
)
class _TokenType(tuple):
def __getattr__(self, val):
if not val or not val[0].isupper():
return tuple.__getattribute__(self, val)
new = _TokenType(self + (val,))
setattr(self, val, new)
return new
def __repr__(self):
return 'Token' + (self and '.' or '') + '.'.join(self)
# Prefer the Token class from Pygments. If Pygments is not installed, use our
# minimalistic Token class.
try:
from pygments.token import Token
except ImportError:
Token = _TokenType()
# Built-in tokens:
#: `ZeroWidthEscape` can be used for raw VT escape sequences that don't
#: cause the cursor position to move. (E.g. FinalTerm's escape sequences
#: for shell integration.)
ZeroWidthEscape = Token.ZeroWidthEscape
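# Illustrative usage (the token names below are invented for the example):
# attribute access creates and caches sub-token types, and lexers pair these
# tokens with text fragments that a Style can later colour.
#
#   Status = Token.Toolbar.Status          # new token type, created on access
#   assert Status is Token.Toolbar.Status  # repeated access returns the same object
#   fragments = [(Status, u'Ready'), (Token, u' ')]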
| bsd-3-clause | 1,900,132,577,199,305,000 | 29.212766 | 78 | 0.689437 | false |
jeffmahoney/crash-python | crash/commands/syscmd.py | 1 | 2069 | # -*- coding: utf-8 -*-
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
"""
SUMMARY
-------
Display system information and configuration data
::
sys [config]
DESCRIPTION
-----------
This command displays system-specific data. If no arguments are entered,
the same system data shown during crash invocation is shown.
``config`` If the kernel was configured with ``CONFIG_IKCONFIG``, then
dump the in-kernel configuration data.
EXAMPLES
--------
Display essential system information:
::
    py-crash> sys
KERNEL: vmlinux.4
DUMPFILE: lcore.cr.4
CPUS: 4
DATE: Mon Oct 11 18:48:55 1999
UPTIME: 10 days, 14:14:39
LOAD AVERAGE: 0.74, 0.23, 0.08
TASKS: 77
NODENAME: test.mclinux.com
RELEASE: 2.2.5-15smp
VERSION: #24 SMP Mon Oct 11 17:41:40 CDT 1999
MACHINE: i686 (500 MHz)
MEMORY: 1 GB
"""
import argparse
from crash.commands import Command, ArgumentParser
from crash.commands import CommandLineError
from crash.cache.syscache import utsname, config, kernel
class SysCommand(Command):
"""system data"""
def __init__(self, name: str) -> None:
parser = ArgumentParser(prog=name)
parser.add_argument('config', nargs='?')
Command.__init__(self, name, parser)
@staticmethod
def show_default() -> None:
print(" UPTIME: {}".format(kernel.uptime))
print("LOAD AVERAGE: {}".format(kernel.loadavg))
print(" NODENAME: {}".format(utsname.nodename))
print(" RELEASE: {}".format(utsname.release))
print(" VERSION: {}".format(utsname.version))
print(" MACHINE: {}".format(utsname.machine))
def execute(self, args: argparse.Namespace) -> None:
if args.config:
if args.config == "config":
print(config)
else:
raise CommandLineError(f"error: unknown option: {args.config}")
else:
self.show_default()
SysCommand("sys")
| gpl-2.0 | 7,658,069,106,556,025,000 | 25.87013 | 79 | 0.60319 | false |
EmreAtes/spack | var/spack/repos/builtin/packages/startup-notification/package.py | 5 | 1756 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class StartupNotification(AutotoolsPackage):
"""startup-notification contains a reference implementation of the
freedesktop startup notification protocol."""
homepage = "https://www.freedesktop.org/wiki/Software/startup-notification/"
url = "http://www.freedesktop.org/software/startup-notification/releases/startup-notification-0.12.tar.gz"
version('0.12', '2cd77326d4dcaed9a5a23a1232fb38e9')
depends_on('libx11')
depends_on('libxcb')
depends_on('xcb-util')
| lgpl-2.1 | 825,322,512,889,543,700 | 44.025641 | 115 | 0.691344 | false |
FMCalisto/FMCalisto.github.io | node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py | 65 | 4989 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import os
import locale
def XmlToString(content, encoding='utf-8', pretty=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Visual Studio files have a lot of pre-defined structures. This function makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
Each XML element of the content is represented as a list composed of:
1. The name of the element, a string,
2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
Example 1:
<test/>
becomes
['test']
Example 2:
<myelement a='value1' b='value2'>
<childtype>This is</childtype>
<childtype>it!</childtype>
</myelement>
becomes
['myelement', {'a':'value1', 'b':'value2'},
['childtype', 'This is'],
['childtype', 'it!'],
]
Args:
content: The structured content to be converted.
encoding: The encoding to report on the first XML line.
pretty: True if we want pretty printing with indents and new lines.
Returns:
The XML content as a string.
"""
# We create a huge list of all the elements of the file.
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
if pretty:
xml_parts.append('\n')
_ConstructContentList(xml_parts, content, pretty)
# Convert it to a string
return ''.join(xml_parts)
def _ConstructContentList(xml_parts, specification, pretty, level=0):
""" Appends the XML parts corresponding to the specification.
Args:
xml_parts: A list of XML parts to be appended to.
specification: The specification of the element. See EasyXml docs.
pretty: True if we want pretty printing with indents and new lines.
level: Indentation level.
"""
# The first item in a specification is the name of the element.
if pretty:
indentation = ' ' * level
new_line = '\n'
else:
indentation = ''
new_line = ''
name = specification[0]
if not isinstance(name, str):
raise Exception('The first item of an EasyXml specification should be '
'a string. Specification was ' + str(specification))
xml_parts.append(indentation + '<' + name)
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].iteritems()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
xml_parts.append('>')
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
multi_line = not all_strings
if multi_line and new_line:
xml_parts.append(new_line)
for child_spec in rest:
# If it's a string, append a text node.
# Otherwise recurse over that child definition
if isinstance(child_spec, str):
xml_parts.append(_XmlEscape(child_spec))
else:
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append('</%s>%s' % (name, new_line))
else:
xml_parts.append('/>%s' % new_line)
def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
win32=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Args:
content: The structured content to be written.
path: Location of the file.
encoding: The encoding to report on the first line of the XML file.
pretty: True if we want pretty printing with indents and new lines.
"""
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != '\r\n':
xml_string = xml_string.replace('\n', '\r\n')
default_encoding = locale.getdefaultlocale()[1]
if default_encoding.upper() != encoding.upper():
xml_string = xml_string.decode(default_encoding).encode(encoding)
# Get the old content
try:
f = open(path, 'r')
existing = f.read()
f.close()
except:
existing = None
# It has changed, write it
if existing != xml_string:
f = open(path, 'w')
f.write(xml_string)
f.close()
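# Usage sketch tying the two entry points together; the element names and the
# output path are invented for the example:
#
#   spec = ['project', {'name': 'demo'},
#           ['files',
#            ['file', 'a.cc'],
#            ['file', 'b.cc']]]
#   xml_text = XmlToString(spec, pretty=True)
#   WriteXmlIfChanged(spec, 'demo.xml', pretty=True)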
_xml_escape_map = {
'"': '"',
"'": ''',
'<': '<',
'>': '>',
'&': '&',
'\n': '
',
'\r': '
',
}
_xml_escape_re = re.compile(
"(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
def _XmlEscape(value, attr=False):
""" Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
# don't replace single quotes in attrs
if attr and m == "'":
return m
return _xml_escape_map[m]
return _xml_escape_re.sub(replace, value)
| mit | -873,029,233,404,161,700 | 29.796296 | 80 | 0.642814 | false |
grevutiu-gabriel/sympy | sympy/geometry/tests/test_plane.py | 36 | 7702 | from __future__ import division
from sympy import (Abs, I, Dummy, Rational, Float, S, Symbol, cos, oo, pi,
simplify, sin, sqrt, symbols, Derivative, asin, acos)
from sympy.geometry import (Circle, Curve, Ellipse, GeometryError, Line, Point,
Polygon, Ray, RegularPolygon, Segment, Triangle,
are_similar, convex_hull, intersection,
Point3D, Line3D, Ray3D, Segment3D, Plane, centroid)
from sympy.geometry.util import are_coplanar
from sympy.utilities.pytest import raises, slow
x = Symbol('x', real=True)
y = Symbol('y', real=True)
z = Symbol('z', real=True)
t = Symbol('t', real=True)
k = Symbol('k', real=True)
x1 = Symbol('x1', real=True)
x2 = Symbol('x2', real=True)
x3 = Symbol('x3', real=True)
y1 = Symbol('y1', real=True)
y2 = Symbol('y2', real=True)
y3 = Symbol('y3', real=True)
z1 = Symbol('z1', real=True)
z2 = Symbol('z2', real=True)
z3 = Symbol('z3', real=True)
half = Rational(1, 2)
def feq(a, b):
"""Test if two floating point values are 'equal'."""
t = Float("1.0E-10")
return -t < a - b < t
@slow
def test_plane():
p1 = Point3D(0, 0, 0)
p2 = Point3D(1, 1, 1)
p3 = Point3D(1, 2, 3)
p4 = Point3D(x, x, x)
p5 = Point3D(y, y, y)
pl3 = Plane(p1, p2, p3)
pl4 = Plane(p1, normal_vector=(1, 1, 1))
pl4b = Plane(p1, p2)
pl5 = Plane(p3, normal_vector=(1, 2, 3))
pl6 = Plane(Point3D(2, 3, 7), normal_vector=(2, 2, 2))
pl7 = Plane(Point3D(1, -5, -6), normal_vector=(1, -2, 1))
l1 = Line3D(Point3D(5, 0, 0), Point3D(1, -1, 1))
l2 = Line3D(Point3D(0, -2, 0), Point3D(3, 1, 1))
l3 = Line3D(Point3D(0, -1, 0), Point3D(5, -1, 9))
assert Plane(p1, p2, p3) != Plane(p1, p3, p2)
assert Plane(p1, p2, p3).is_coplanar(Plane(p1, p3, p2))
assert pl3 == Plane(Point3D(0, 0, 0), normal_vector=(1, -2, 1))
assert pl3 != pl4
assert pl4 == pl4b
assert pl5 == Plane(Point3D(1, 2, 3), normal_vector=(1, 2, 3))
assert pl5.equation(x, y, z) == x + 2*y + 3*z - 14
assert pl3.equation(x, y, z) == x - 2*y + z
assert pl3.p1 == p1
assert pl4.p1 == p1
assert pl5.p1 == p3
assert pl4.normal_vector == (1, 1, 1)
assert pl5.normal_vector == (1, 2, 3)
assert p1 in pl3
assert p1 in pl4
assert p3 in pl5
assert pl3.projection(Point(0, 0)) == p1
p = pl3.projection(Point3D(1, 1, 0))
assert p == Point3D(7/6, 2/3, 1/6)
assert p in pl3
l = pl3.projection_line(Line(Point(0, 0), Point(1, 1)))
assert l == Line3D(Point3D(0, 0, 0), Point3D(7/6, 2/3, 1/6))
assert l in pl3
# get a segment that does not intersect the plane which is also
    # parallel to pl3's normal vector
t = Dummy()
r = pl3.random_point()
a = pl3.perpendicular_line(r).arbitrary_point(t)
s = Segment3D(a.subs(t, 1), a.subs(t, 2))
assert s.p1 not in pl3 and s.p2 not in pl3
assert pl3.projection_line(s).equals(r)
assert pl3.projection_line(Segment(Point(1, 0), Point(1, 1))) == \
Segment3D(Point3D(5/6, 1/3, -1/6), Point3D(7/6, 2/3, 1/6))
assert pl6.projection_line(Ray(Point(1, 0), Point(1, 1))) == \
Ray3D(Point3D(14/3, 11/3, 11/3), Point3D(13/3, 13/3, 10/3))
assert pl3.perpendicular_line(r.args) == pl3.perpendicular_line(r)
assert pl3.is_parallel(pl6) is False
assert pl4.is_parallel(pl6)
assert pl6.is_parallel(l1) is False
assert pl3.is_perpendicular(pl6)
assert pl4.is_perpendicular(pl7)
assert pl6.is_perpendicular(pl7)
assert pl6.is_perpendicular(l1) is False
assert pl7.distance(Point3D(1, 3, 5)) == 5*sqrt(6)/6
assert pl6.distance(Point3D(0, 0, 0)) == 4*sqrt(3)
assert pl6.distance(pl6.p1) == 0
assert pl7.distance(pl6) == 0
assert pl7.distance(l1) == 0
assert pl6.distance(Segment3D(Point3D(2, 3, 1), Point3D(1, 3, 4))) == 0
    assert pl6.distance(Plane(Point3D(5, 5, 5), normal_vector=(8, 8, 8))) == sqrt(3)
assert pl6.angle_between(pl3) == pi/2
assert pl6.angle_between(pl6) == 0
assert pl6.angle_between(pl4) == 0
assert pl7.angle_between(Line3D(Point3D(2, 3, 5), Point3D(2, 4, 6))) == \
-asin(sqrt(3)/6)
assert pl6.angle_between(Ray3D(Point3D(2, 4, 1), Point3D(6, 5, 3))) == \
asin(sqrt(7)/3)
assert pl7.angle_between(Segment3D(Point3D(5, 6, 1), Point3D(1, 2, 4))) == \
-asin(7*sqrt(246)/246)
assert are_coplanar(l1, l2, l3) is False
assert are_coplanar(l1) is False
assert are_coplanar(Point3D(2, 7, 2), Point3D(0, 0, 2),
Point3D(1, 1, 2), Point3D(1, 2, 2))
assert are_coplanar(Plane(p1, p2, p3), Plane(p1, p3, p2))
assert Plane.are_concurrent(pl3, pl4, pl5) is False
assert Plane.are_concurrent(pl6) is False
raises(ValueError, lambda: Plane.are_concurrent(Point3D(0, 0, 0)))
assert pl3.parallel_plane(Point3D(1, 2, 5)) == Plane(Point3D(1, 2, 5), \
normal_vector=(1, -2, 1))
# perpendicular_plane
p = Plane((0, 0, 0), (1, 0, 0))
# default
assert p.perpendicular_plane() == Plane(Point3D(0, 0, 0), (0, 1, 0))
# 1 pt
assert p.perpendicular_plane(Point3D(1, 0, 1)) == \
Plane(Point3D(1, 0, 1), (0, 1, 0))
# pts as tuples
assert p.perpendicular_plane((1, 0, 1), (1, 1, 1)) == \
Plane(Point3D(1, 0, 1), (0, 0, -1))
a, b = Point3D(0, 0, 0), Point3D(0, 1, 0)
Z = (0, 0, 1)
p = Plane(a, normal_vector=Z)
# case 4
assert p.perpendicular_plane(a, b) == Plane(a, (1, 0, 0))
n = Point3D(*Z)
# case 1
assert p.perpendicular_plane(a, n) == Plane(a, (-1, 0, 0))
# case 2
assert Plane(a, normal_vector=b.args).perpendicular_plane(a, a + b) == \
Plane(Point3D(0, 0, 0), (1, 0, 0))
# case 1&3
assert Plane(b, normal_vector=Z).perpendicular_plane(b, b + n) == \
Plane(Point3D(0, 1, 0), (-1, 0, 0))
# case 2&3
assert Plane(b, normal_vector=b.args).perpendicular_plane(n, n + b) == \
Plane(Point3D(0, 0, 1), (1, 0, 0))
assert pl6.intersection(pl6) == [pl6]
assert pl4.intersection(pl4.p1) == [pl4.p1]
assert pl3.intersection(pl6) == [
Line3D(Point3D(8, 4, 0), Point3D(2, 4, 6))]
assert pl3.intersection(Line3D(Point3D(1,2,4), Point3D(4,4,2))) == [
Point3D(2, 8/3, 10/3)]
assert pl3.intersection(Plane(Point3D(6, 0, 0), normal_vector=(2, -5, 3))
) == [Line3D(Point3D(-24, -12, 0), Point3D(-25, -13, -1))]
assert pl6.intersection(Ray3D(Point3D(2, 3, 1), Point3D(1, 3, 4))) == [
Point3D(-1, 3, 10)]
assert pl6.intersection(Segment3D(Point3D(2, 3, 1), Point3D(1, 3, 4))) == [
Point3D(-1, 3, 10)]
assert pl7.intersection(Line(Point(2, 3), Point(4, 2))) == [
Point3D(13/2, 3/4, 0)]
r = Ray(Point(2, 3), Point(4, 2))
assert Plane((1,2,0), normal_vector=(0,0,1)).intersection(r) == [
Ray3D(Point(2, 3), Point(4, 2))]
assert pl3.random_point() in pl3
# issue 8570
l2 = Line3D(Point3D(S(50000004459633)/5000000000000,
-S(891926590718643)/1000000000000000,
S(231800966893633)/100000000000000),
Point3D(S(50000004459633)/50000000000000,
-S(222981647679771)/250000000000000,
S(231800966893633)/100000000000000))
p2 = Plane(Point3D(S(402775636372767)/100000000000000,
-S(97224357654973)/100000000000000,
S(216793600814789)/100000000000000),
(-S('9.00000087501922'), -S('4.81170658872543e-13'),
S('0.0')))
assert str([i.n(2) for i in p2.intersection(l2)]) == \
'[Point3D(4.0, -0.89, 2.3)]'
| bsd-3-clause | -2,150,645,828,502,788,000 | 37.318408 | 80 | 0.574137 | false |
rvalera01/platalist | cloudflare.py | 221 | 2812 | import sys,traceback,urllib2,re, urllib,xbmc
def createCookie(url,cj=None,agent='Mozilla/5.0 (Windows NT 6.1; rv:32.0) Gecko/20100101 Firefox/32.0'):
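    # Fetch `url` and return the body of the first response; if Cloudflare's
    # anti-bot JavaScript challenge (jschl) is present, solve it so that the
    # clearance cookie ends up in `cj` for subsequent requests.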
urlData=''
try:
import urlparse,cookielib,urllib2
class NoRedirection(urllib2.HTTPErrorProcessor):
def http_response(self, request, response):
return response
def parseJSString(s):
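            # Convert one of Cloudflare's obfuscated JS arithmetic snippets
            # (built from tokens like !+[] and !![]) into a Python expression
            # and eval() it to an integer.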
try:
offset=1 if s[0]=='+' else 0
val = int(eval(s.replace('!+[]','1').replace('!![]','1').replace('[]','0').replace('(','str(')[offset:]))
return val
except:
pass
#agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'
if cj==None:
cj = cookielib.CookieJar()
opener = urllib2.build_opener(NoRedirection, urllib2.HTTPCookieProcessor(cj))
opener.addheaders = [('User-Agent', agent)]
response = opener.open(url)
result=urlData = response.read()
response.close()
# print result
# print response.headers
jschl = re.compile('name="jschl_vc" value="(.+?)"/>').findall(result)[0]
init = re.compile('setTimeout\(function\(\){\s*.*?.*:(.*?)};').findall(result)[0]
builder = re.compile(r"challenge-form\'\);\s*(.*)a.v").findall(result)[0]
decryptVal = parseJSString(init)
lines = builder.split(';')
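        # Replay each statement of the challenge on decryptVal: the character
        # just before '=' is the arithmetic operator (+, -, *) applied with
        # the parsed operand.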
for line in lines:
if len(line)>0 and '=' in line:
sections=line.split('=')
line_val = parseJSString(sections[1])
decryptVal = int(eval(str(decryptVal)+sections[0][-1]+str(line_val)))
# print urlparse.urlparse(url).netloc
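        # Cloudflare expects the computed value plus the length of the domain name.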
answer = decryptVal + len(urlparse.urlparse(url).netloc)
u='/'.join(url.split('/')[:-1])
query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (u, jschl, answer)
if 'type="hidden" name="pass"' in result:
passval=re.compile('name="pass" value="(.*?)"').findall(result)[0]
query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (u,urllib.quote_plus(passval), jschl, answer)
        xbmc.sleep(4*1000) ## sleep so that the call works
# print query
# import urllib2
# opener = urllib2.build_opener(NoRedirection,urllib2.HTTPCookieProcessor(cj))
# opener.addheaders = [('User-Agent', agent)]
#print opener.headers
response = opener.open(query)
# print response.headers
#cookie = str(response.headers.get('Set-Cookie'))
#response = opener.open(url)
#print cj
# print response.read()
response.close()
return urlData
except:
traceback.print_exc(file=sys.stdout)
return urlData
| gpl-2.0 | 2,319,688,773,289,185,000 | 38.055556 | 128 | 0.566501 | false |
dstftw/youtube-dl | youtube_dl/extractor/r7.py | 53 | 4600 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import int_or_none
class R7IE(InfoExtractor):
_VALID_URL = r'''(?x)
https?://
(?:
(?:[a-zA-Z]+)\.r7\.com(?:/[^/]+)+/idmedia/|
noticias\.r7\.com(?:/[^/]+)+/[^/]+-|
player\.r7\.com/video/i/
)
(?P<id>[\da-f]{24})
'''
_TESTS = [{
'url': 'http://videos.r7.com/policiais-humilham-suspeito-a-beira-da-morte-morre-com-dignidade-/idmedia/54e7050b0cf2ff57e0279389.html',
'md5': '403c4e393617e8e8ddc748978ee8efde',
'info_dict': {
'id': '54e7050b0cf2ff57e0279389',
'ext': 'mp4',
'title': 'Policiais humilham suspeito à beira da morte: "Morre com dignidade"',
'description': 'md5:01812008664be76a6479aa58ec865b72',
'thumbnail': r're:^https?://.*\.jpg$',
'duration': 98,
'like_count': int,
'view_count': int,
},
}, {
'url': 'http://esportes.r7.com/videos/cigano-manda-recado-aos-fas/idmedia/4e176727b51a048ee6646a1b.html',
'only_matching': True,
}, {
'url': 'http://noticias.r7.com/record-news/video/representante-do-instituto-sou-da-paz-fala-sobre-fim-do-estatuto-do-desarmamento-5480fc580cf2285b117f438d/',
'only_matching': True,
}, {
'url': 'http://player.r7.com/video/i/54e7050b0cf2ff57e0279389?play=true&video=http://vsh.r7.com/54e7050b0cf2ff57e0279389/ER7_RE_BG_MORTE_JOVENS_570kbps_2015-02-2009f17818-cc82-4c8f-86dc-89a66934e633-ATOS_copy.mp4&linkCallback=http://videos.r7.com/policiais-humilham-suspeito-a-beira-da-morte-morre-com-dignidade-/idmedia/54e7050b0cf2ff57e0279389.html&thumbnail=http://vtb.r7.com/ER7_RE_BG_MORTE_JOVENS_570kbps_2015-02-2009f17818-cc82-4c8f-86dc-89a66934e633-thumb.jpg&idCategory=192&share=true&layout=full&full=true',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
video = self._download_json(
'http://player-api.r7.com/video/i/%s' % video_id, video_id)
title = video['title']
formats = []
media_url_hls = video.get('media_url_hls')
if media_url_hls:
formats.extend(self._extract_m3u8_formats(
media_url_hls, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
media_url = video.get('media_url')
if media_url:
f = {
'url': media_url,
'format_id': 'http',
}
# m3u8 format always matches the http format, let's copy metadata from
# one to another
m3u8_formats = list(filter(
lambda f: f.get('vcodec') != 'none', formats))
if len(m3u8_formats) == 1:
f_copy = m3u8_formats[0].copy()
f_copy.update(f)
f_copy['protocol'] = 'http'
f = f_copy
formats.append(f)
self._sort_formats(formats)
description = video.get('description')
thumbnail = video.get('thumb')
duration = int_or_none(video.get('media_duration'))
like_count = int_or_none(video.get('likes'))
view_count = int_or_none(video.get('views'))
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'like_count': like_count,
'view_count': view_count,
'formats': formats,
}
class R7ArticleIE(InfoExtractor):
_VALID_URL = r'https?://(?:[a-zA-Z]+)\.r7\.com/(?:[^/]+/)+[^/?#&]+-(?P<id>\d+)'
_TEST = {
'url': 'http://tv.r7.com/record-play/balanco-geral/videos/policiais-humilham-suspeito-a-beira-da-morte-morre-com-dignidade-16102015',
'only_matching': True,
}
@classmethod
def suitable(cls, url):
return False if R7IE.suitable(url) else super(R7ArticleIE, cls).suitable(url)
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
r'<div[^>]+(?:id=["\']player-|class=["\']embed["\'][^>]+id=["\'])([\da-f]{24})',
webpage, 'video id')
return self.url_result('http://player.r7.com/video/i/%s' % video_id, R7IE.ie_key())
| unlicense | -8,723,090,007,608,064,000 | 40.0625 | 524 | 0.548598 | false |
jrbl/invenio | modules/webmessage/lib/webmessage.py | 20 | 19732 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" WebMessage module, messaging system"""
__revision__ = "$Id$"
import invenio.webmessage_dblayer as db
from invenio.webmessage_config import CFG_WEBMESSAGE_STATUS_CODE, \
CFG_WEBMESSAGE_RESULTS_FIELD, \
CFG_WEBMESSAGE_SEPARATOR, \
CFG_WEBMESSAGE_ROLES_WITHOUT_QUOTA, \
InvenioWebMessageError
from invenio.config import CFG_SITE_LANG, \
CFG_WEBMESSAGE_MAX_SIZE_OF_MESSAGE
from invenio.messages import gettext_set_language
from invenio.dateutils import datetext_default, get_datetext
from invenio.htmlutils import escape_html
from invenio.webuser import list_users_in_roles
try:
import invenio.template
webmessage_templates = invenio.template.load('webmessage')
except:
pass
from invenio.errorlib import register_exception
def perform_request_display_msg(uid, msgid, ln=CFG_SITE_LANG):
"""
Displays a specific message
@param uid: user id
@param msgid: message id
@return: body
"""
_ = gettext_set_language(ln)
body = ""
if (db.check_user_owns_message(uid, msgid) == 0):
# The user doesn't own this message
try:
            raise InvenioWebMessageError(_('Sorry, this message is not in your mailbox.'))
except InvenioWebMessageError, exc:
register_exception()
body = webmessage_templates.tmpl_error(exc.message, ln)
return body
else:
(msg_id,
msg_from_id, msg_from_nickname,
msg_sent_to, msg_sent_to_group,
msg_subject, msg_body,
msg_sent_date, msg_received_date,
msg_status) = db.get_message(uid, msgid)
if (msg_id == ""):
# The message exists in table user_msgMESSAGE
# but not in table msgMESSAGE => table inconsistency
try:
raise InvenioWebMessageError(_('This message does not exist.'))
except InvenioWebMessageError, exc:
register_exception()
body = webmessage_templates.tmpl_error(exc.message, ln)
return body
else:
if (msg_status == CFG_WEBMESSAGE_STATUS_CODE['NEW']):
db.set_message_status(uid, msgid,
CFG_WEBMESSAGE_STATUS_CODE['READ'])
body = webmessage_templates.tmpl_display_msg(
msg_id,
msg_from_id,
msg_from_nickname,
msg_sent_to,
msg_sent_to_group,
msg_subject,
msg_body,
msg_sent_date,
msg_received_date,
ln)
return body
def perform_request_display(uid, warnings=[], infos=[], ln=CFG_SITE_LANG):
"""
Displays the user's Inbox
@param uid: user id
@return: body with warnings
"""
body = ""
rows = []
rows = db.get_all_messages_for_user(uid)
nb_messages = db.count_nb_messages(uid)
no_quota_users = list_users_in_roles(CFG_WEBMESSAGE_ROLES_WITHOUT_QUOTA)
no_quota = False
if uid in no_quota_users:
no_quota = True
body = webmessage_templates.tmpl_display_inbox(messages=rows,
infos=infos,
warnings=warnings,
nb_messages=nb_messages,
no_quota=no_quota,
ln=ln)
return body
def perform_request_delete_msg(uid, msgid, ln=CFG_SITE_LANG):
"""
Delete a given message from user inbox
@param uid: user id (int)
@param msgid: message id (int)
@param ln: language
@return: body with warnings
"""
_ = gettext_set_language(ln)
warnings = []
infos = []
body = ""
if (db.check_user_owns_message(uid, msgid) == 0):
# The user doesn't own this message
try:
            raise InvenioWebMessageError(_('Sorry, this message is not in your mailbox.'))
except InvenioWebMessageError, exc:
register_exception()
body = webmessage_templates.tmpl_error(exc.message, ln)
return body
else:
if (db.delete_message_from_user_inbox(uid, msgid) == 0):
warnings.append(_("The message could not be deleted."))
else:
infos.append(_("The message was successfully deleted."))
return perform_request_display(uid, warnings, infos, ln)
def perform_request_delete_all(uid, confirmed=False, ln=CFG_SITE_LANG):
"""
Delete every message for a given user
@param uid: user id (int)
@param confirmed: 0 will produce a confirmation message
@param ln: language
@return: body with warnings
"""
infos = []
warnings = []
_ = gettext_set_language(ln)
if confirmed:
db.delete_all_messages(uid)
infos = [_("Your mailbox has been emptied.")]
return perform_request_display(uid, warnings, infos, ln)
else:
body = webmessage_templates.tmpl_confirm_delete(ln)
return body
def perform_request_write(uid,
msg_reply_id="",
msg_to="",
msg_to_group="",
msg_subject="",
msg_body="",
ln=CFG_SITE_LANG):
"""
    Display a "write a message" page.
@param uid: user id.
@type uid: int
@param msg_reply_id: if this message is a reply to another, other's ID.
@type msg_reply_id: int
@param msg_to: comma separated usernames.
@type msg_to: string
@param msg_to_group: comma separated groupnames.
@type msg_to_group: string
@param msg_subject: message subject.
@type msg_subject: string
@param msg_body: message body.
@type msg_body: string
@param ln: language.
@type ln: string
@return: body with warnings.
"""
warnings = []
body = ""
_ = gettext_set_language(ln)
msg_from_nickname = ""
msg_id = 0
if (msg_reply_id):
if (db.check_user_owns_message(uid, msg_reply_id) == 0):
# The user doesn't own this message
try:
                raise InvenioWebMessageError(_('Sorry, this message is not in your mailbox.'))
except InvenioWebMessageError, exc:
register_exception()
body = webmessage_templates.tmpl_error(exc.message, ln)
return body
else:
# dummy == variable name to make pylint and pychecker happy!
(msg_id,
msg_from_id, msg_from_nickname,
dummy, dummy,
msg_subject, msg_body,
dummy, dummy, dummy) = db.get_message(uid, msg_reply_id)
if (msg_id == ""):
# The message exists in table user_msgMESSAGE
# but not in table msgMESSAGE => table inconsistency
try:
raise InvenioWebMessageError(_('This message does not exist.'))
except InvenioWebMessageError, exc:
register_exception()
body = webmessage_templates.tmpl_error(exc.message, ln)
return body
else:
msg_to = msg_from_nickname or str(msg_from_id)
body = webmessage_templates.tmpl_write(msg_to=msg_to,
msg_to_group=msg_to_group,
msg_id=msg_id,
msg_subject=msg_subject,
msg_body=msg_body,
warnings=[],
ln=ln)
return body
def perform_request_write_with_search(
uid,
msg_to_user="",
msg_to_group="",
msg_subject="",
msg_body="",
msg_send_year=0,
msg_send_month=0,
msg_send_day=0,
names_selected=[],
search_pattern="",
results_field=CFG_WEBMESSAGE_RESULTS_FIELD['NONE'],
add_values=0,
ln=CFG_SITE_LANG):
"""
Display a write message page, with prefilled values
@param msg_to_user: comma separated usernames (str)
@param msg_to_group: comma separated groupnames (str)
@param msg_subject: message subject (str)
    @param msg_body: message body (str)
    @param msg_send_year: year to send this message on (int)
    @param msg_send_month: month to send this message on (int)
    @param msg_send_day: day to send this message on (int)
    @param names_selected: list of usernames or groupnames ['str'] already selected
    @param search_pattern: will search users or groups matching this pattern (str)
    @param results_field: whether user or group search results are displayed
    @param add_values: if 1, names_selected will be added to the recipient field
@param ln: language
@return: body with warnings
"""
warnings = []
search_results_list = []
def cat_names(name1, name2):
""" name1, name2 => 'name1, name2' """
return name1 + CFG_WEBMESSAGE_SEPARATOR + " " + name2
if results_field == CFG_WEBMESSAGE_RESULTS_FIELD['USER']:
if add_values and len(names_selected):
usernames_to_add = reduce(cat_names, names_selected)
if msg_to_user:
msg_to_user = cat_names(msg_to_user, usernames_to_add)
else:
msg_to_user = usernames_to_add
users_found = db.get_nicknames_like(search_pattern)
if users_found:
for user_name in users_found:
search_results_list.append((user_name[0],
user_name[0] in names_selected))
elif results_field == CFG_WEBMESSAGE_RESULTS_FIELD['GROUP']:
if add_values and len(names_selected):
groupnames_to_add = reduce(cat_names, names_selected)
if msg_to_group:
msg_to_group = cat_names(msg_to_group, groupnames_to_add)
else:
msg_to_group = groupnames_to_add
groups_dict = db.get_groupnames_like(uid, search_pattern)
groups_found = groups_dict.values()
if groups_found:
for group_name in groups_found:
search_results_list.append((group_name,
group_name in names_selected))
body = webmessage_templates.tmpl_write(
msg_to=msg_to_user,
msg_to_group=msg_to_group,
msg_subject=msg_subject,
msg_body=msg_body,
msg_send_year=msg_send_year,
msg_send_month=msg_send_month,
msg_send_day=msg_send_day,
warnings=warnings,
search_results_list=search_results_list,
search_pattern=search_pattern,
results_field=results_field,
ln=ln)
return body
def perform_request_send(uid,
msg_to_user="",
msg_to_group="",
msg_subject="",
msg_body="",
msg_send_year=0,
msg_send_month=0,
msg_send_day=0,
ln=CFG_SITE_LANG,
use_email_address = 0):
"""
    Send a message. If it cannot be sent, return warnings to the write page.
    @param uid: id of the sending user (int)
    @param msg_to_user: comma separated usernames (recipients) (str)
    @param msg_to_group: comma separated groupnames (recipients) (str)
@param msg_subject: subject of message (str)
@param msg_body: body of message (str)
@param msg_send_year: send this message on year x (int)
@param msg_send_month: send this message on month y (int)
@param msg_send_day: send this message on day z (int)
@param ln: language
@return: (body with warnings, title, navtrail)
"""
_ = gettext_set_language(ln)
def strip_spaces(text):
"""suppress spaces before and after x (str)"""
return text.strip()
# wash user input
users_to = map(strip_spaces, msg_to_user.split(CFG_WEBMESSAGE_SEPARATOR))
groups_to = map(strip_spaces, msg_to_group.split(CFG_WEBMESSAGE_SEPARATOR))
if users_to == ['']:
users_to = []
if groups_to == ['']:
groups_to = []
warnings = []
infos = []
problem = None
users_to_str = CFG_WEBMESSAGE_SEPARATOR.join(users_to)
groups_to_str = CFG_WEBMESSAGE_SEPARATOR.join(groups_to)
send_on_date = get_datetext(msg_send_year, msg_send_month, msg_send_day)
if (msg_send_year == msg_send_month == msg_send_day == 0):
status = CFG_WEBMESSAGE_STATUS_CODE['NEW']
else:
status = CFG_WEBMESSAGE_STATUS_CODE['REMINDER']
if send_on_date == datetext_default:
warning = \
_("The chosen date (%(x_year)i/%(x_month)i/%(x_day)i) is invalid.")
warning = warning % {'x_year': msg_send_year,
'x_month': msg_send_month,
'x_day': msg_send_day}
warnings.append(warning)
problem = True
if not(users_to_str or groups_to_str):
# <=> not(users_to_str) AND not(groups_to_str)
warnings.append(_("Please enter a user name or a group name."))
problem = True
if len(msg_body) > CFG_WEBMESSAGE_MAX_SIZE_OF_MESSAGE:
warnings.append(_("Your message is too long, please edit it. Maximum size allowed is %i characters.") % \
(CFG_WEBMESSAGE_MAX_SIZE_OF_MESSAGE,))
problem = True
if use_email_address == 0:
users_dict = db.get_uids_from_nicks(users_to)
users_to = users_dict.items() # users_to=[(nick, uid),(nick2, uid2)]
elif use_email_address == 1:
users_dict = db.get_uids_from_emails(users_to)
users_to = users_dict.items() # users_to=[(email, uid),(email2, uid2)]
groups_dict = db.get_gids_from_groupnames(groups_to)
groups_to = groups_dict.items()
gids_to = []
for (group_name, group_id) in groups_to:
if not(group_id):
warnings.append(_("Group %s does not exist.") % \
(escape_html(group_name)))
problem = 1
else:
gids_to.append(group_id)
# Get uids from gids
uids_from_group = db.get_uids_members_of_groups(gids_to)
    # Add the original uids, and make sure there are no duplicate values.
tmp_dict = {}
for uid_receiver in uids_from_group:
tmp_dict[uid_receiver] = None
for (user_nick, user_id) in users_to:
if user_id:
if user_id not in tmp_dict:
uids_from_group.append(user_id)
tmp_dict[user_id] = None
else:
if type(user_nick) == int or \
type(user_nick) == str and user_nick.isdigit():
user_nick = int(user_nick)
if db.user_exists(user_nick) and user_nick not in tmp_dict:
uids_from_group.append(user_nick)
tmp_dict[user_nick] = None
else:
warnings.append(_("User %s does not exist.")% \
(escape_html(user_nick)))
problem = True
if problem:
body = webmessage_templates.tmpl_write(msg_to=users_to_str,
msg_to_group=groups_to_str,
msg_subject=msg_subject,
msg_body=msg_body,
msg_send_year=msg_send_year,
msg_send_month=msg_send_month,
msg_send_day=msg_send_day,
warnings=warnings,
ln=ln)
title = _("Write a message")
navtrail = get_navtrail(ln, title)
return (body, title, navtrail)
else:
msg_id = db.create_message(uid,
users_to_str, groups_to_str,
msg_subject, msg_body,
send_on_date)
uid_problem = db.send_message(uids_from_group, msg_id, status)
if len(uid_problem) > 0:
usernames_problem_dict = db.get_nicks_from_uids(uid_problem)
usernames_problem = usernames_problem_dict.values()
def listing(name1, name2):
""" name1, name2 => 'name1, name2' """
return str(name1) + ", " + str(name2)
warning = _("Your message could not be sent to the following recipients due to their quota:") + " "
warnings.append(warning + reduce(listing, usernames_problem))
if len(uids_from_group) != len(uid_problem):
infos.append(_("Your message has been sent."))
else:
db.check_if_need_to_delete_message_permanently([msg_id])
body = perform_request_display(uid, warnings,
infos, ln)
title = _("Your Messages")
return (body, title, get_navtrail(ln))
def account_new_mail(uid, ln=CFG_SITE_LANG):
"""
display new mail info for myaccount.py page.
@param uid: user id (int)
@param ln: language
@return: html body
"""
nb_new_mail = db.get_nb_new_messages_for_user(uid)
total_mail = db.get_nb_readable_messages_for_user(uid)
return webmessage_templates.tmpl_account_new_mail(nb_new_mail,
total_mail, ln)
def get_navtrail(ln=CFG_SITE_LANG, title=""):
"""
gets the navtrail for title...
@param title: title of the page
@param ln: language
@return: HTML output
"""
navtrail = webmessage_templates.tmpl_navtrail(ln, title)
return navtrail
| gpl-2.0 | 7,042,288,233,730,820,000 | 40.022869 | 113 | 0.529799 | false |
peterlauri/django | django/utils/dateformat.py | 7 | 11927 | """
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
from __future__ import unicode_literals
import calendar
import datetime
import re
import time
from django.utils import six
from django.utils.dates import (
MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR,
)
from django.utils.encoding import force_text
from django.utils.timezone import get_default_timezone, is_aware, is_naive
from django.utils.translation import ugettext as _
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(force_text(formatstr))):
if i % 2:
if type(self.data) is datetime.date and hasattr(TimeFormat, piece):
raise TypeError(
"The format for date objects may not contain "
"time-related format specifiers (found '%s')." % piece
)
pieces.append(force_text(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return ''.join(pieces)
class TimeFormat(Formatter):
def __init__(self, obj):
self.data = obj
self.timezone = None
# We only support timezone when formatting datetime objects,
# not date objects (timezone information not appropriate),
# or time objects (against established django policy).
if isinstance(obj, datetime.datetime):
if is_naive(obj):
self.timezone = get_default_timezone()
else:
self.timezone = obj.tzinfo
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def B(self):
"Swatch Internet time"
raise NotImplementedError('may be implemented in a future release')
def e(self):
"""
Timezone name.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
try:
if hasattr(self.data, 'tzinfo') and self.data.tzinfo:
# Have to use tzinfo.tzname and not datetime.tzname
                # because datetime.tzname does not expect Unicode
return self.data.tzinfo.tzname(self.data) or ""
except NotImplementedError:
pass
return ""
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return '%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return '%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return '%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return '%02d' % self.data.minute
def O(self): # NOQA: E743
"""
Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
seconds = self.Z()
if seconds == "":
return ""
sign = '-' if seconds < 0 else '+'
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return '%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return '%02d' % self.data.second
def T(self):
"""
Time zone of this machine; e.g. 'EST' or 'MDT'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
name = None
try:
name = self.timezone.tzname(self.data)
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
pass
if name is None:
name = self.format('O')
return six.text_type(name)
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return '%06d' % self.data.microsecond
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
try:
offset = self.timezone.utcoffset(self.data)
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
return ""
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
class DateFormat(TimeFormat):
year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return '%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self): # NOQA: E743
"'1' if Daylight Savings Time, '0' otherwise."
try:
if self.timezone and self.timezone.dst(self.data):
return '1'
else:
return '0'
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
return ''
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self): # NOQA: E743
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return '%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar()[0]
def r(self):
"RFC 5322 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
return self.format('D, j M Y H:i:s O')
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return 'th'
last = self.data.day % 10
if last == 1:
return 'st'
if last == 2:
return 'nd'
if last == 3:
return 'rd'
return 'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if isinstance(self.data, datetime.datetime) and is_aware(self.data):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
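            # The date falls before the first ISO Monday of the year and
            # 1 January is a Fri/Sat/Sun, so it belongs to the last ISO week
            # (52 or 53) of the previous year.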
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number
def y(self):
"Year, 2 digits; e.g. '99'"
return six.text_type(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
| bsd-3-clause | -8,929,506,698,178,384,000 | 30.55291 | 102 | 0.552612 | false |
kyroskoh/js-test-tool | js_test_tool/tests/test_suite.py | 3 | 26335 | import unittest
import mock
import os
import os.path
from StringIO import StringIO
import yaml
import copy
from textwrap import dedent
from lxml import etree
from js_test_tool.tests.helpers import TempWorkspaceTestCase
from js_test_tool.suite import SuiteDescription, SuiteDescriptionError, \
SuiteRenderer, SuiteRendererError
class SuiteDescriptionTest(TempWorkspaceTestCase):
# Temporary directory paths to be created within our root temp dir
TEMP_DIRS = ['src/subdir', 'spec/subdir', 'lib/subdir',
'src/empty', 'spec/empty', 'lib/empty',
'other_src', 'other_spec', 'other_lib',
'fixtures', 'single_file']
# Test files to create. Paths specified relative to the root temp dir.
LIB_FILES = ['lib/1.js', 'lib/2.js', 'lib/subdir/3.js',
'other_lib/test.js',
'single_file/lib.js']
SRC_FILES = ['src/1.js', 'src/2.js', 'src/subdir/3.js',
'other_src/test.js',
'single_file/src.js']
SPEC_FILES = ['spec/1.js', 'spec/2.js', 'spec/subdir/3.js',
'other_spec/test.js',
'single_file/spec.js']
FIXTURE_FILES = ['fixtures/fix1.html', 'fixtures/fix2.html',
'single_file/fix.html']
IGNORE_FILES = ['src/ignore.txt', 'spec/ignore.txt', 'lib/ignore.txt']
# Valid data used to create the YAML file describing the test suite
YAML_DATA = {'test_suite_name': 'test_suite',
'lib_paths': ['lib', 'other_lib', 'single_file/lib.js'],
'src_paths': ['src', 'other_src', 'single_file/src.js'],
'spec_paths': ['spec', 'other_spec', 'single_file/spec.js'],
'fixture_paths': ['fixtures', 'single_file/fix.html'],
'test_runner': 'jasmine'}
def setUp(self):
"""
Generate fake JS files in a temporary directory.
"""
# Call the superclass implementation to create the temp workspace
super(SuiteDescriptionTest, self).setUp()
# Create subdirectories for dependency, source, and spec files
# Because we are using `makedirs()`, the intermediate directories
# will also be created.
for dir_path in self.TEMP_DIRS:
os.makedirs(os.path.join(self.temp_dir, dir_path))
# Create the test files
all_files = (self.LIB_FILES + self.SRC_FILES
+ self.SPEC_FILES + self.FIXTURE_FILES
+ self.IGNORE_FILES)
for file_path in all_files:
full_path = os.path.join(self.temp_dir, file_path)
with open(full_path, "w") as file_handle:
file_handle.write(u'\u023Eest \u0256ata'.encode('utf8'))
def test_valid_description(self):
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(self.YAML_DATA)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that the root directory is stored
self.assertEqual(desc.root_dir(), self.temp_dir)
# Check that we find the files we expect
self.assertEqual(desc.suite_name(), self.YAML_DATA['test_suite_name'])
self.assertEqual(desc.lib_paths(), self.LIB_FILES)
self.assertEqual(desc.src_paths(), self.SRC_FILES)
self.assertEqual(desc.spec_paths(), self.SPEC_FILES)
self.assertEqual(desc.fixture_paths(), self.FIXTURE_FILES)
self.assertEqual(desc.test_runner(), self.YAML_DATA['test_runner'])
self.assertEqual(desc.prepend_path(), '')
def test_different_working_dir(self):
# Change the working directory temporarily
# (the superclass will reset it afterwards)
os.chdir(self.TEMP_DIRS[0])
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(self.YAML_DATA)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we find the files we expect
self.assertEqual(desc.lib_paths(), self.LIB_FILES)
self.assertEqual(desc.src_paths(), self.SRC_FILES)
self.assertEqual(desc.spec_paths(), self.SPEC_FILES)
self.assertEqual(desc.fixture_paths(), self.FIXTURE_FILES)
self.assertEqual(desc.test_runner(), self.YAML_DATA['test_runner'])
def test_double_dot_paths(self):
# Transform the paths into relative paths
rel_path_map = lambda path: os.path.join('..', path)
yaml_data = copy.deepcopy(self.YAML_DATA)
for key in ['lib_paths', 'src_paths', 'spec_paths', 'fixture_paths']:
yaml_data[key] = map(rel_path_map, yaml_data[key])
# Create a new root directory for the suite
# temp_dir/suite_root
# where the files are still in ../lib, ../src, etc.
suite_root = os.path.join(self.temp_dir, 'suite_root')
os.mkdir(suite_root)
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Expect an error for using relative paths,
# even though the files exist
with self.assertRaises(SuiteDescriptionError):
SuiteDescription(yaml_file, suite_root)
def test_no_such_root_dir(self):
# Try to create a description with a non-existent root directory
yaml_file = self._yaml_buffer(self.YAML_DATA)
no_such_dir = os.path.join(self.temp_dir, 'no_such_dir')
with self.assertRaises(SuiteDescriptionError):
SuiteDescription(yaml_file, no_such_dir)
def test_root_dir_is_file(self):
# Try to create a description with a file (not directory) root
yaml_file = self._yaml_buffer(self.YAML_DATA)
file_path = os.path.join(self.temp_dir, self.SRC_FILES[0])
with self.assertRaises(SuiteDescriptionError):
SuiteDescription(yaml_file, file_path)
def test_non_list_data(self):
# Replace all list values with single values
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['lib_paths'] = 'lib'
yaml_data['src_paths'] = 'src'
yaml_data['spec_paths'] = 'spec'
yaml_data['fixture_paths'] = 'fixtures'
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we get the right paths
# (exclude files from the directories we left out)
self.assertEqual(desc.lib_paths(), self.LIB_FILES[0:3])
self.assertEqual(desc.src_paths(), self.SRC_FILES[0:3])
self.assertEqual(desc.spec_paths(), self.SPEC_FILES[0:3])
def test_prepend_path_is_not_string(self):
# Set prepend_path to non-string values
for prepend_path in [42, ['list', 'of', 'items'], {'dict': 12}]:
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['prepend_path'] = prepend_path
self._assert_invalid_desc(yaml_data)
def test_yaml_is_list_not_dict(self):
# Set up the YAML file to be a list of dicts instead
# of a dict.
# (This is easy to do by mistake in the YAML syntax).
bad_data = [{key: value} for key, value in self.YAML_DATA.iteritems()]
yaml_file = self._yaml_buffer(bad_data)
# Expect an exception
with self.assertRaises(SuiteDescriptionError):
SuiteDescription(yaml_file, self.temp_dir)
def test_no_lib_specified(self):
# 'lib_paths' is an optional key
yaml_data = copy.deepcopy(self.YAML_DATA)
del yaml_data['lib_paths']
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we get an empty list of lib paths
self.assertEqual(desc.lib_paths(), [])
def test_no_fixtures_specified(self):
# 'fixture_paths' is an optional key
yaml_data = copy.deepcopy(self.YAML_DATA)
del yaml_data['fixture_paths']
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we get an empty list of lib paths
self.assertEqual(desc.fixture_paths(), [])
def test_non_js_paths(self):
# Add extra non-JS files
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['src_paths'].append('src.txt')
yaml_data['spec_paths'].append('src.txt')
yaml_data['lib_paths'].append('src.txt')
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we ignore those files
self.assertEqual(desc.lib_paths(), self.LIB_FILES)
self.assertEqual(desc.src_paths(), self.SRC_FILES)
self.assertEqual(desc.spec_paths(), self.SPEC_FILES)
def test_repeated_paths(self):
# Repeat paths that are already included in the directories
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['src_paths'].append(self.SRC_FILES[0])
yaml_data['spec_paths'].append(self.SPEC_FILES[0])
yaml_data['lib_paths'].append(self.LIB_FILES[0])
yaml_data['fixture_paths'].append(self.FIXTURE_FILES[0])
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that we ignore repeats
self.assertEqual(desc.lib_paths(), self.LIB_FILES)
self.assertEqual(desc.src_paths(), self.SRC_FILES)
self.assertEqual(desc.spec_paths(), self.SPEC_FILES)
self.assertEqual(desc.fixture_paths(), self.FIXTURE_FILES)
def test_prepend_path(self):
# Add a path to prepend to source paths in reports
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['prepend_path'] = 'base/path'
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that the prepend path is stored
self.assertEqual(desc.prepend_path(), 'base/path')
def test_exclude_from_page(self):
# Add in a rule to exclude files in other_* dir
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['exclude_from_page'] = 'other_[^/]*/.*'
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that the root directory is stored
self.assertEqual(desc.root_dir(), self.temp_dir)
# Check that we find the files we expect
expected_lib = self.LIB_FILES[:]
expected_lib.remove('other_lib/test.js')
expected_src = self.SRC_FILES[:]
expected_src.remove('other_src/test.js')
expected_spec = self.SPEC_FILES[:]
expected_spec.remove('other_spec/test.js')
self.assertEqual(desc.lib_paths(only_in_page=True), expected_lib)
self.assertEqual(desc.src_paths(only_in_page=True), expected_src)
self.assertEqual(desc.spec_paths(only_in_page=True), expected_spec)
def test_include_and_exclude_from_page(self):
# Add in a rule to exclude files in other_* dir
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['exclude_from_page'] = 'other_[^/]*/.*'
# Add an override rule to always include other_*/test.js
yaml_data['include_in_page'] = 'other_[^/]*/test.js'
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Create the suite description using the YAML file
desc = SuiteDescription(yaml_file, self.temp_dir)
# Check that the root directory is stored
self.assertEqual(desc.root_dir(), self.temp_dir)
# Check that we still get all the files back
# (the include rule overrides the exclude rule)
self.assertEqual(desc.lib_paths(only_in_page=True), self.LIB_FILES)
self.assertEqual(desc.src_paths(only_in_page=True), self.SRC_FILES)
self.assertEqual(desc.spec_paths(only_in_page=True), self.SPEC_FILES)
def test_missing_required_data(self):
for key in ['test_suite_name', 'src_paths', 'spec_paths', 'test_runner']:
# Delete the required key from the description
yaml_data = copy.deepcopy(self.YAML_DATA)
del yaml_data[key]
# Print a message to make failures more informative
print "Missing key '{}' should raise an exception".format(key)
# Check that we get an exception
self._assert_invalid_desc(yaml_data)
def test_empty_required_list(self):
for key in ['src_paths', 'spec_paths']:
# Replace the key with an empty list
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data[key] = []
# Print a message to make failures more informative
print "Empty list for '{}' should raise an exception".format(key)
# Check that we get an exception
self._assert_invalid_desc(yaml_data)
def test_invalid_test_runner(self):
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['test_runner'] = 'invalid_test_runner'
# Check that we get an exception
self._assert_invalid_desc(yaml_data)
def test_invalid_suite_name(self):
invalid_names = [
'with a space',
'with/slash',
'with?question',
'with+plus',
'with&'
]
# Suite names need to be URL-encodable
for invalid in invalid_names:
print invalid
yaml_data = copy.deepcopy(self.YAML_DATA)
yaml_data['test_suite_name'] = invalid
self._assert_invalid_desc(yaml_data)
def _assert_invalid_desc(self, yaml_data):
"""
Given `yaml_data` (dict), assert that it raises
a `SuiteDescriptionError`.
"""
# Create an in-memory YAML file from the data
yaml_file = self._yaml_buffer(yaml_data)
# Expect an exception when we try to parse the YAML file
with self.assertRaises(SuiteDescriptionError):
SuiteDescription(yaml_file, self.temp_dir)
@staticmethod
def _yaml_buffer(data_dict):
"""
Create an in-memory buffer with YAML-encoded data
provided by `data_dict` (a dictionary).
Returns the buffer (a file-like object).
"""
# Encode the `data_dict` as YAML and write it to the buffer
yaml_str = yaml.dump(data_dict)
# Create a file-like string buffer to hold the YAML data
string_buffer = StringIO(yaml_str)
return string_buffer
class SuiteRendererTest(unittest.TestCase):
JASMINE_TEST_RUNNER_SCRIPT = dedent("""
(function() {
var jasmineEnv = jasmine.getEnv();
jasmineEnv.updateInterval = 1000;
var reporter = new jasmine.JsonReporter("js_test_tool_results", "test-suite");
jasmineEnv.addReporter(reporter);
jasmineEnv.specFilter = function(spec) {
return reporter.specFilter(spec);
};
var currentWindowOnload = window.onload;
window.onload = function() {
if (currentWindowOnload) {
currentWindowOnload();
}
execJasmine();
};
function execJasmine() {
try {
jasmineEnv.execute();
}
catch(err) {
window.js_test_tool.reportError(err);
}
}
if (!window.js_test_tool) {
window.js_test_tool = {};
window.js_test_tool.reportError = function(err) {
var resultDiv = document.getElementById("js_test_tool_results");
var errDiv = document.getElementById("js_test_tool_error");
// If an error <div> is defined (e.g. not in dev mode)
// then write the error to that <div>
// so the Browser can report it
if (errDiv) {
errDiv.innerHTML = err.toString()
if ('stack' in err) {
errDiv.innerHTML += "\\n" + err.stack
}
// Signal to the browser that we're done
// to avoid blocking until timeout
resultsDiv.className = "done";
}
// Re-throw the error (e.g. for dev mode)
else {
throw err;
}
}
}
})();
""").strip()
JASMINE_LOAD_FIXTURES_SCRIPT = dedent("""
// Load fixtures if using jasmine-jquery
if (jasmine.getFixtures) {
jasmine.getFixtures().fixturesPath = "/suite/test-suite/include/";
}
""").strip()
ALERT_STUB_SCRIPT = dedent("""
// Stub out modal dialog alerts, which will prevent
// us from accessing the test results in the DOM
window.confirm = function(){return true;};
window.alert = function(){return;};
""").strip()
def setUp(self):
# Create the renderer we will use
self.renderer = SuiteRenderer()
def test_unicode(self):
# Create a mock test suite description
desc = self._mock_desc(['lib1.js', 'lib2.js'],
['src1.js', 'src2.js'],
['spec1.js', 'spec2.js'],
'jasmine')
# Render the description as HTML
html = self.renderer.render_to_string('test-suite', desc)
# Expect that we get a `unicode` string
self.assertTrue(isinstance(html, unicode))
def test_jasmine_runner_includes(self):
jasmine_libs = ['jasmine/jasmine.js',
'jasmine/jasmine-json.js']
lib_paths = ['lib1.js', 'lib2.js']
src_paths = ['src1.js', 'src2.js']
spec_paths = ['spec1.js', 'spec2.js']
# Create a mock test suite description
desc = self._mock_desc(lib_paths, src_paths, spec_paths, 'jasmine')
# Check that we get the right script includes
suite_includes = lib_paths + src_paths + spec_paths
self._assert_js_includes(jasmine_libs, suite_includes, desc)
# Check that only "include_in_page" scripts were used
desc.lib_paths.assert_called_with(only_in_page=True)
desc.src_paths.assert_called_with(only_in_page=True)
desc.spec_paths.assert_called_with(only_in_page=True)
def test_no_lib_files(self):
jasmine_libs = ['jasmine/jasmine.js',
'jasmine/jasmine-json.js']
src_paths = ['src.js']
spec_paths = ['spec.js']
# Create a mock test suite description
desc = self._mock_desc([], src_paths, spec_paths, 'jasmine')
# Check that we get the right script includes
suite_includes = src_paths + spec_paths
self._assert_js_includes(jasmine_libs, suite_includes, desc)
def test_render_jasmine_runner(self):
# Create a test runner page
tree = self._test_runner_html()
# Expect that a <div> exists with the correct ID for the results
div_id = SuiteRenderer.RESULTS_DIV_ID
elems = tree.xpath('//div[@id="{}"]'.format(div_id))
self.assertEqual(len(elems), 1)
# Expect that a <div> exists for reporting JS errors
div_id = SuiteRenderer.ERROR_DIV_ID
elems = tree.xpath('//div[@id="{}"]'.format(div_id))
self.assertEqual(len(elems), 1)
# Expect that the right scripts are available
self._assert_script(tree, self.JASMINE_TEST_RUNNER_SCRIPT, -1)
self._assert_script(tree, self.JASMINE_LOAD_FIXTURES_SCRIPT, -2)
def test_render_jasmine_dev_mode(self):
# Create a test runner page in dev mode
tree = self._test_runner_html(dev_mode=True)
# Should get the same script, except with an HTML reporter
# instead of the custom JSON reporter
expected_script = self.JASMINE_TEST_RUNNER_SCRIPT.replace(
'JsonReporter("js_test_tool_results", "test-suite")',
'HtmlReporter()')
# Check that we have the right script available
self._assert_script(tree, expected_script, -1)
def test_jasmine_dev_mode_includes(self):
# Configure the renderer to use dev mode
self.renderer = SuiteRenderer(dev_mode=True)
# Include the HTMLReporter instead of the JSON reporter
jasmine_libs = ['jasmine/jasmine.js',
'jasmine/jasmine-html.js']
lib_paths = ['lib1.js', 'lib2.js']
src_paths = ['src1.js', 'src2.js']
spec_paths = ['spec1.js', 'spec2.js']
# Create a mock test suite description
desc = self._mock_desc(lib_paths, src_paths, spec_paths, 'jasmine')
# Check that we get the right script includes
suite_includes = lib_paths + src_paths + spec_paths
self._assert_js_includes(jasmine_libs, suite_includes, desc)
def test_stub_alerts(self):
tree = self._test_runner_html()
self._assert_script(tree, self.ALERT_STUB_SCRIPT, 0)
def test_stub_alerts_dev_mode(self):
tree = self._test_runner_html(dev_mode=True)
self._assert_script(tree, self.ALERT_STUB_SCRIPT, 0)
def test_undefined_template(self):
# Create a mock test suite description with an invalid test runner
desc = self._mock_desc([], [], [], 'invalid_test_runner')
# Should get an exception that the template could not be found
with self.assertRaises(SuiteRendererError):
self.renderer.render_to_string('test-suite', desc)
def test_template_render_error(self):
# Create a mock test suite description with no includes
desc = self._mock_desc([], [], [], 'jasmine')
# Patch Jinja2's `render()` function
with mock.patch.object(SuiteRenderer, 'render_template') as render_func:
# Have the render function raise an exception
render_func.side_effect = ValueError()
# Expect that we get a `SuiteRendererError`
with self.assertRaises(SuiteRendererError):
self.renderer.render_to_string('test-suite', desc)
def _test_runner_html(self, dev_mode=False):
"""
Return a parsed tree of the test runner page HTML.
"""
# Configure the renderer to use dev mode
self.renderer = SuiteRenderer(dev_mode=dev_mode)
# Create a mock test suite description
desc = self._mock_desc([], [], [], 'jasmine')
# Render the description to HTML, enabling dev mode
html = self.renderer.render_to_string('test-suite', desc)
# Parse the HTML
return etree.HTML(html)
def _assert_script(self, html_tree, expected_script, script_index):
"""
Assert that the parsed HTML tree `html_tree` contains
`expected_script` in a <script> tag at `script_index` (starting at 0).
"""
# Retrieve the script elements
script_elems = html_tree.xpath('/html/head/script')
# Expect there are enough elements to retrieve the index
self.assertTrue(len(script_elems) > abs(script_index))
# Retrieve the script element
actual_script = script_elems[script_index].text.strip()
# Expect that we got the right script
self.assertEqual(actual_script, expected_script)
def _assert_js_includes(self, runner_includes, suite_includes, suite_desc):
"""
Render `suite_desc` (a `SuiteDescription` instance or mock) to
        `html`, then assert that the `html` contains `<script>` tags with
`runner_includes` (files included by default, with a `/runner/` prefix)
and `suite_includes` (files included by the test suite,
with a `/suite/include` prefix)
"""
# Render the description as HTML
html = self.renderer.render_to_string('test-suite', suite_desc)
# Parse the HTML
tree = etree.HTML(html)
# Retrieve all <script> inclusions
script_elems = tree.xpath('/html/head/script')
# Prepend the runner and suite includes
runner_includes = [os.path.join('/runner', path)
for path in runner_includes]
suite_includes = [os.path.join('/suite', 'test-suite', 'include', path)
for path in suite_includes]
# Check that they match the sources we provided, in order
all_paths = [element.get('src') for element in script_elems
if element.get('src') is not None]
self.assertEqual(all_paths, runner_includes + suite_includes)
@staticmethod
def _mock_desc(lib_paths, src_paths, spec_paths, test_runner):
"""
Create a mock SuiteDescription configured to return
`lib_paths` (paths to JS dependency files)
`src_paths` (paths to JS source files)
`spec_paths` (paths to JS spec files)
`test_runner` (name of the test runner, e.g. Jasmine)
Returns the configured mock
"""
desc = mock.MagicMock(SuiteDescription)
desc.lib_paths.return_value = lib_paths
desc.src_paths.return_value = src_paths
desc.spec_paths.return_value = spec_paths
desc.test_runner.return_value = test_runner
return desc
| apache-2.0 | -6,969,714,067,693,987,000 | 35.98736 | 90 | 0.601291 | false |
elpaso/QGIS | tests/src/python/test_qgsxmlutils.py | 3 | 6048 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsXmlUtils.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '18/11/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA switch sip api
from qgis.core import (QgsXmlUtils,
QgsProperty,
QgsGeometry,
QgsCoordinateReferenceSystem)
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtGui import QColor
from qgis.testing import start_app, unittest
start_app()
class TestQgsXmlUtils(unittest.TestCase):
def test_invalid(self):
"""
Test that invalid attributes are correctly loaded and written
"""
doc = QDomDocument("properties")
elem = QgsXmlUtils.writeVariant(None, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertIsNone(prop2)
def test_integer(self):
"""
Test that maps are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = {'a': 1, 'b': 2, 'c': 3, 'd': -1}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_long(self):
"""
Test that maps are correctly loaded and written
"""
doc = QDomDocument("properties")
# not sure if this actually does map to a long?
my_properties = {'a': 9223372036854775808}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_string(self):
"""
Test that strings are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = {'a': 'a', 'b': 'b', 'c': 'something_else', 'empty': ''}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_double(self):
"""
Test that maps are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = {'a': 0.27, 'b': 1.0, 'c': 5}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_boolean(self):
"""
Test that maps are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = {'a': True, 'b': False}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_list(self):
"""
Test that lists are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = [1, 4, 'a', 'test', 7.9]
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_complex(self):
"""
Test that maps are correctly loaded and written
"""
doc = QDomDocument("properties")
my_properties = {'boolean': True, 'integer': False, 'map': {'a': 1}}
elem = QgsXmlUtils.writeVariant(my_properties, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(my_properties, prop2)
def test_property(self):
"""
Test that QgsProperty values are correctly loaded and written
"""
doc = QDomDocument("properties")
prop = QgsProperty.fromValue(1001)
elem = QgsXmlUtils.writeVariant(prop, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(prop, prop2)
prop = QgsProperty.fromExpression('1+2=5')
elem = QgsXmlUtils.writeVariant(prop, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(prop, prop2)
prop = QgsProperty.fromField('oid')
elem = QgsXmlUtils.writeVariant(prop, doc)
prop2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(prop, prop2)
def test_crs(self):
"""
Test that QgsCoordinateReferenceSystem values are correctly loaded and written
"""
doc = QDomDocument("properties")
crs = QgsCoordinateReferenceSystem('epsg:3111')
elem = QgsXmlUtils.writeVariant(crs, doc)
crs2 = QgsXmlUtils.readVariant(elem)
self.assertTrue(crs2.isValid())
self.assertEqual(crs2.authid(), 'EPSG:3111')
crs = QgsCoordinateReferenceSystem()
elem = QgsXmlUtils.writeVariant(crs, doc)
crs2 = QgsXmlUtils.readVariant(elem)
self.assertFalse(crs2.isValid())
def test_geom(self):
"""
Test that QgsGeometry values are correctly loaded and written
"""
doc = QDomDocument("properties")
g = QgsGeometry.fromWkt('Point(3 4)')
elem = QgsXmlUtils.writeVariant(g, doc)
g2 = QgsXmlUtils.readVariant(elem)
self.assertEqual(g2.asWkt(), 'Point (3 4)')
def test_color(self):
"""
Test that QColor values are correctly loaded and written
"""
doc = QDomDocument("properties")
elem = QgsXmlUtils.writeVariant(QColor(100, 200, 210), doc)
c = QgsXmlUtils.readVariant(elem)
self.assertEqual(c, QColor(100, 200, 210))
elem = QgsXmlUtils.writeVariant(QColor(100, 200, 210, 50), doc)
c = QgsXmlUtils.readVariant(elem)
self.assertEqual(c, QColor(100, 200, 210, 50))
elem = QgsXmlUtils.writeVariant(QColor(), doc)
c = QgsXmlUtils.readVariant(elem)
self.assertFalse(c.isValid())
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 6,973,646,045,881,198,000 | 28.502439 | 86 | 0.615741 | false |
mlperf/training_results_v0.7 | Google/benchmarks/transformer/implementations/transformer-research-TF-tpu-v3-8192/lingvo/tasks/mt/params/wmt14_en_de.py | 3 | 6670 | # Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Train NMT Models on WMT'14 English-German machine translation task."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo import model_registry
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import base_model_params
from lingvo.tasks.mt import base_config
from lingvo.tasks.mt import input_generator
from lingvo.tasks.mt import model
@model_registry.RegisterSingleTaskModel
class WmtEnDeTransformerBase(base_model_params.SingleTaskModelParams):
"""Params for WMT'14 En->De."""
DATADIR = '/usr/local/google/wmt14/wpm/'
VOCAB_SIZE = 32000
def _CommonInputParams(self, is_eval):
"""Input generator params for WMT'14 En->De."""
p = input_generator.NmtInput.Params()
p.tokenizer.vocab_size = self.VOCAB_SIZE
if is_eval:
p.file_random_seed = 27182818
p.file_parallelism = 1
p.file_buffer_size = 1
p.bucket_upper_bound = [10, 14, 19, 26, 36, 50, 70, 98, 137, 200]
p.bucket_batch_limit = [16] * 8 + [4] * 2
else:
p.file_random_seed = 0
p.file_parallelism = 16
p.file_buffer_size = 10000000
p.bucket_upper_bound = ([8, 10, 12, 14, 16, 20, 24, 28] +
[32, 40, 48, 56, 64, 80, 96])
p.bucket_batch_limit = ([512, 409, 341, 292, 256, 204, 170, 146] +
[128, 102, 85, 73, 64, 51, 42])
return p
def Train(self):
p = self._CommonInputParams(is_eval=False)
p.file_pattern = 'tfrecord:' + os.path.join(self.DATADIR,
'train.tfrecords-*')
p.tokenizer.token_vocab_filepath = os.path.join(self.DATADIR,
'wpm-ende.voc')
p.num_samples = 4492447
return p
def Dev(self):
p = self._CommonInputParams(is_eval=True)
p.file_pattern = 'tfrecord:' + os.path.join(self.DATADIR, 'dev.tfrecords')
p.tokenizer.token_vocab_filepath = os.path.join(self.DATADIR,
'wpm-ende.voc')
p.num_samples = 3000
return p
def Test(self):
p = self._CommonInputParams(is_eval=True)
p.file_pattern = 'tfrecord:' + os.path.join(self.DATADIR, 'test.tfrecords')
p.tokenizer.token_vocab_filepath = os.path.join(self.DATADIR,
'wpm-ende.voc')
p.num_samples = 2737
return p
def Task(self):
p = base_config.SetupTransformerParams(
model.TransformerModel.Params(),
name='wmt14_en_de_transformer_base',
vocab_size=self.VOCAB_SIZE,
model_dim=512,
hidden_dim=2048,
num_heads=8,
num_layers=6,
residual_dropout_prob=0.1,
input_dropout_prob=0.1,
learning_rate=3.0,
warmup_steps=40000)
p.eval.samples_per_summary = 7500
return p
@model_registry.RegisterSingleTaskModel
class WmtEnDeTransformerSmall(WmtEnDeTransformerBase):
"""Small Transformer Params for WMT'14 En->De."""
def Task(self):
p = base_config.SetupTransformerParams(
model.TransformerModel.Params(),
name='wmt14_en_de_transformer_small',
vocab_size=self.VOCAB_SIZE,
model_dim=64,
hidden_dim=128,
num_heads=2,
num_layers=2,
residual_dropout_prob=0.1,
input_dropout_prob=0.1,
learning_rate=3.0,
warmup_steps=40000)
p.eval.samples_per_summary = 7500
return p
@model_registry.RegisterSingleTaskModel
class WmtEnDeTransformerSmallCloudTpu(WmtEnDeTransformerSmall):
"""Small Transformer Params for WMT'14 En->De on TPU."""
def _CommonInputParams(self, is_eval):
p = super(WmtEnDeTransformerSmallCloudTpu, self)._CommonInputParams(is_eval)
p.pad_to_max_seq_length = True
p.source_max_length = p.bucket_upper_bound[-1]
p.bucket_batch_limit = [64] * len(p.bucket_upper_bound)
return p
def Task(self):
p = super(WmtEnDeTransformerSmallCloudTpu, self).Task()
p.decoder.token_emb.max_num_shards = 1
p.encoder.token_emb.max_num_shards = 1
return p
@model_registry.RegisterSingleTaskModel
class WmtEnDeRNMT(WmtEnDeTransformerBase):
"""Params for WMT'14 En->De in sync training."""
def _CommonInputParams(self, is_eval):
p = super(WmtEnDeRNMT, self)._CommonInputParams(is_eval)
if is_eval:
p.bucket_upper_bound = [10, 14, 19, 26, 36, 50, 70, 98, 200]
p.bucket_batch_limit = [128] * 8 + [32]
else:
p.bucket_upper_bound = [10, 14, 19, 26, 36, 50, 70, 98]
p.bucket_batch_limit = [128] * 7 + [64]
return p
def Task(self):
p = base_config.SetupRNMTParams(
model.RNMTModel.Params(),
name='wmt14_en_de_rnmtplus_base',
vocab_size=self.VOCAB_SIZE,
embedding_dim=1024,
hidden_dim=1024,
num_heads=4,
num_encoder_layers=6,
num_decoder_layers=8,
learning_rate=1e-4,
l2_regularizer_weight=1e-5,
lr_warmup_steps=500,
lr_decay_start=400000,
lr_decay_end=1200000,
lr_min=0.5,
ls_uncertainty=0.1,
atten_dropout_prob=0.3,
residual_dropout_prob=0.3,
adam_beta2=0.98,
adam_epsilon=1e-6,
)
p.eval.samples_per_summary = 7500
return p
@model_registry.RegisterSingleTaskModel
class WmtEnDeRNMTCloudTpu(WmtEnDeRNMT):
"""Params for WMT'14 En->De in sync training on TPU."""
def _CommonInputParams(self, is_eval):
p = super(WmtEnDeRNMTCloudTpu, self)._CommonInputParams(is_eval)
p.pad_to_max_seq_length = True
p.source_max_length = p.bucket_upper_bound[-1]
p.bucket_batch_limit = [16] * len(p.bucket_upper_bound)
return p
def Task(self):
p = super(WmtEnDeRNMTCloudTpu, self).Task()
p.encoder.emb.max_num_shards = 1
p.decoder.emb.max_num_shards = 1
return p
| apache-2.0 | -923,244,760,926,141,700 | 31.378641 | 107 | 0.632534 | false |
NickPresta/sentry | src/sentry/migrations/0020_auto__add_projectdomain__add_unique_projectdomain_project_domain.py | 6 | 12686 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models, transaction
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProjectDomain'
db.create_table('sentry_projectdomain', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='domain_set', to=orm['sentry.Project'])),
('domain', self.gf('django.db.models.fields.CharField')(max_length=128)),
))
db.send_create_signal('sentry', ['ProjectDomain'])
# Adding unique constraint on 'ProjectDomain', fields ['project', 'domain']
db.create_unique('sentry_projectdomain', ['project_id', 'domain'])
def backwards(self, orm):
# Removing unique constraint on 'ProjectDomain', fields ['project', 'domain']
db.delete_unique('sentry_projectdomain', ['project_id', 'domain'])
# Deleting model 'ProjectDomain'
db.delete_table('sentry_projectdomain')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.groupedmessage': {
'Meta': {'unique_together': "(('project', 'logger', 'view', 'checksum'),)", 'object_name': 'GroupedMessage'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.message': {
'Meta': {'object_name': 'Message'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'message_set'", 'null': 'True', 'to': "orm['sentry.GroupedMessage']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.GroupedMessage']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.GroupedMessage']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owned_project_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'sentry.projectdomain': {
'Meta': {'unique_together': "(('project', 'domain'),)", 'object_name': 'ProjectDomain'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'domain_set'", 'to': "orm['sentry.Project']"})
},
'sentry.projectmember': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'permissions': ('django.db.models.fields.BigIntegerField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'project_set'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['sentry']
| bsd-3-clause | -2,977,154,033,931,838,500 | 76.828221 | 182 | 0.553366 | false |
zstackorg/zstack-woodpecker | integrationtest/vm/virtualrouter/vr/test_vr_ha.py | 2 | 1258 | '''
1. Create 1 test VM with VR.
2. After the VM is created, check the VR appliance VM HA status.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.ha_operations as ha_ops
_config_ = {
'timeout' : 600,
'noparallel' : False
}
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
test_util.test_dsc('Create test vm1 and check')
if test_lib.lib_get_ha_enable() != 'true':
test_util.test_skip("vm ha not enabled. Skip test")
vm1 = test_stub.create_vlan_vm()
test_obj_dict.add_vm(vm1)
vm1.check()
vrs = test_lib.lib_find_vr_by_vm(vm1.vm)
for vr in vrs:
if vr.applianceVmType != "vrouter":
continue
if ha_ops.get_vm_instance_ha_level(vr.uuid) != "NeverStop":
test_util.test_fail('vr: %s is not set to HA mode NeverStop.' % vr.uuid)
vm1.destroy()
test_util.test_pass('Check VR HA mode Success')
#Will be called only if exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_obj_dict)
| apache-2.0 | -8,793,476,895,990,900,000 | 26.590909 | 84 | 0.635135 | false |
marzique/cs50_finance | sql.py | 1 | 6312 | import datetime
import decimal
import importlib
import logging
import re
import sqlalchemy
import sqlparse
import sys
import warnings
class SQL(object):
"""Wrap SQLAlchemy to provide a simple SQL API."""
def __init__(self, url, **kwargs):
"""
Create instance of sqlalchemy.engine.Engine.
URL should be a string that indicates database dialect and connection arguments.
http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine
http://docs.sqlalchemy.org/en/latest/dialects/index.html
"""
# log statements to standard error
logging.basicConfig(level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
# create engine, raising exception if back end's module not installed
self.engine = sqlalchemy.create_engine(url, **kwargs)
def execute(self, text, **params):
"""
Execute a SQL statement.
"""
class UserDefinedType(sqlalchemy.TypeDecorator):
"""
Add support for expandable values, a la https://bitbucket.org/zzzeek/sqlalchemy/issues/3953/expanding-parameter.
"""
impl = sqlalchemy.types.UserDefinedType
def process_literal_param(self, value, dialect):
"""Receive a literal parameter value to be rendered inline within a statement."""
def process(value):
"""Render a literal value, escaping as needed."""
# bool
if isinstance(value, bool):
return sqlalchemy.types.Boolean().literal_processor(dialect)(value)
# datetime.date
elif isinstance(value, datetime.date):
return sqlalchemy.types.String().literal_processor(dialect)(value.strftime("%Y-%m-%d"))
# datetime.datetime
elif isinstance(value, datetime.datetime):
return sqlalchemy.types.String().literal_processor(dialect)(value.strftime("%Y-%m-%d %H:%M:%S"))
# datetime.time
elif isinstance(value, datetime.time):
return sqlalchemy.types.String().literal_processor(dialect)(value.strftime("%H:%M:%S"))
# float
elif isinstance(value, float):
return sqlalchemy.types.Float().literal_processor(dialect)(value)
# int
elif isinstance(value, int):
return sqlalchemy.types.Integer().literal_processor(dialect)(value)
# # long
# elif sys.version_info.major != 3 and isinstance(value, long):
# return sqlalchemy.types.Integer().literal_processor(dialect)(value)
# str
elif isinstance(value, str):
return sqlalchemy.types.String().literal_processor(dialect)(value)
# None
elif isinstance(value, sqlalchemy.sql.elements.Null):
return sqlalchemy.types.NullType().literal_processor(dialect)(value)
# unsupported value
raise RuntimeError("unsupported value")
# process value(s), separating with commas as needed
if type(value) is list:
return ", ".join([process(v) for v in value])
else:
return process(value)
# allow only one statement at a time
if len(sqlparse.split(text)) > 1:
raise RuntimeError("too many statements at once")
# raise exceptions for warnings
warnings.filterwarnings("error")
# prepare, execute statement
try:
# construct a new TextClause clause
statement = sqlalchemy.text(text)
# iterate over parameters
for key, value in params.items():
# translate None to NULL
if value is None:
value = sqlalchemy.sql.null()
# bind parameters before statement reaches database, so that bound parameters appear in exceptions
# http://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.text
statement = statement.bindparams(sqlalchemy.bindparam(key, value=value, type_=UserDefinedType()))
# stringify bound parameters
# http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html#how-do-i-render-sql-expressions-as-strings-possibly-with-bound-parameters-inlined
statement = str(statement.compile(compile_kwargs={"literal_binds": True}))
# execute statement
result = self.engine.execute(statement)
# log statement
self.logger.debug(statement)
# if SELECT (or INSERT with RETURNING), return result set as list of dict objects
if re.search(r"^\s*SELECT", statement, re.I):
# coerce any decimal.Decimal objects to float objects
# https://groups.google.com/d/msg/sqlalchemy/0qXMYJvq8SA/oqtvMD9Uw-kJ
rows = [dict(row) for row in result.fetchall()]
for row in rows:
for column in row:
if isinstance(row[column], decimal.Decimal):
row[column] = float(row[column])
return rows
# if INSERT, return primary key value for a newly inserted row
elif re.search(r"^\s*INSERT", statement, re.I):
if self.engine.url.get_backend_name() in ["postgres", "postgresql"]:
result = self.engine.execute(sqlalchemy.text("SELECT LASTVAL()"))
return result.first()[0]
else:
return result.lastrowid
# if DELETE or UPDATE, return number of rows matched
elif re.search(r"^\s*(?:DELETE|UPDATE)", statement, re.I):
return result.rowcount
# if some other statement, return True unless exception
return True
# if constraint violated, return None
except sqlalchemy.exc.IntegrityError:
return None | mit | 336,255,576,216,343,800 | 40.807947 | 156 | 0.571293 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.6.0/Lib/test/test_importlib/frozen/test_finder.py | 17 | 2105 | from .. import abc
from .. import util
machinery = util.import_importlib('importlib.machinery')
import unittest
class FindSpecTests(abc.FinderTests):
"""Test finding frozen modules."""
def find(self, name, path=None):
finder = self.machinery.FrozenImporter
return finder.find_spec(name, path)
def test_module(self):
name = '__hello__'
spec = self.find(name)
self.assertEqual(spec.origin, 'frozen')
def test_package(self):
spec = self.find('__phello__')
self.assertIsNotNone(spec)
def test_module_in_package(self):
spec = self.find('__phello__.spam', ['__phello__'])
self.assertIsNotNone(spec)
# No frozen package within another package to test with.
test_package_in_package = None
# No easy way to test.
test_package_over_module = None
def test_failure(self):
spec = self.find('<not real>')
self.assertIsNone(spec)
(Frozen_FindSpecTests,
Source_FindSpecTests
) = util.test_both(FindSpecTests, machinery=machinery)
class FinderTests(abc.FinderTests):
"""Test finding frozen modules."""
def find(self, name, path=None):
finder = self.machinery.FrozenImporter
return finder.find_module(name, path)
def test_module(self):
name = '__hello__'
loader = self.find(name)
self.assertTrue(hasattr(loader, 'load_module'))
def test_package(self):
loader = self.find('__phello__')
self.assertTrue(hasattr(loader, 'load_module'))
def test_module_in_package(self):
loader = self.find('__phello__.spam', ['__phello__'])
self.assertTrue(hasattr(loader, 'load_module'))
# No frozen package within another package to test with.
test_package_in_package = None
# No easy way to test.
test_package_over_module = None
def test_failure(self):
loader = self.find('<not real>')
self.assertIsNone(loader)
(Frozen_FinderTests,
Source_FinderTests
) = util.test_both(FinderTests, machinery=machinery)
if __name__ == '__main__':
unittest.main()
| mit | -7,729,427,689,351,009,000 | 24.059524 | 61 | 0.636105 | false |
MediaMath/Diamond | src/collectors/beanstalkd/test/testbeanstalkd.py | 2 | 6503 | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from beanstalkd import BeanstalkdCollector
################################################################################
def run_only_if_beanstalkc_is_available(func):
try:
import beanstalkc
beanstalkc # workaround for pyflakes issue #13
except ImportError:
beanstalkc = None
pred = lambda: beanstalkc is not None
return run_only(func, pred)
class TestBeanstalkdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('BeanstalkdCollector', {
'host': 'localhost',
'port': 11300,
})
self.collector = BeanstalkdCollector(config, None)
def test_import(self):
self.assertTrue(BeanstalkdCollector)
@run_only_if_beanstalkc_is_available
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
stats = {
'instance': {
'current-connections': 10,
'max-job-size': 65535,
'cmd-release': 0,
'cmd-reserve': 4386,
'pid': 23703,
'cmd-bury': 0,
'current-producers': 0,
'total-jobs': 4331,
'current-jobs-ready': 0,
'cmd-peek-buried': 0,
'current-tubes': 7,
'current-jobs-delayed': 0,
'uptime': 182954,
'cmd-watch': 55,
'job-timeouts': 0,
'cmd-stats': 1,
'rusage-stime': 295.970497,
'current-jobs-reserved': 0,
'current-jobs-buried': 0,
'cmd-reserve-with-timeout': 0,
'cmd-put': 4331,
'cmd-pause-tube': 0,
'cmd-list-tubes-watched': 0,
'cmd-list-tubes': 0,
'current-workers': 9,
'cmd-list-tube-used': 0,
'cmd-ignore': 0,
'binlog-records-migrated': 0,
'current-waiting': 9,
'cmd-peek': 0,
'cmd-peek-ready': 0,
'cmd-peek-delayed': 0,
'cmd-touch': 0,
'binlog-oldest-index': 0,
'binlog-current-index': 0,
'cmd-use': 4321,
'total-connections': 4387,
'cmd-delete': 4331,
'binlog-max-size': 10485760,
'cmd-stats-job': 0,
'rusage-utime': 125.92787,
'cmd-stats-tube': 0,
'binlog-records-written': 0,
'cmd-kick': 0,
'current-jobs-urgent': 0,
},
'tubes': [
{
'current-jobs-delayed': 0,
'pause': 0,
'name': 'default',
'cmd-pause-tube': 0,
'current-jobs-buried': 0,
'cmd-delete': 10,
'pause-time-left': 0,
'current-waiting': 9,
'current-jobs-ready': 0,
'total-jobs': 10,
'current-watching': 10,
'current-jobs-reserved': 0,
'current-using': 10,
'current-jobs-urgent': 0,
}
]
}
patch_get_stats = patch.object(BeanstalkdCollector,
'_get_stats',
Mock(return_value=stats))
patch_get_stats.start()
self.collector.collect()
patch_get_stats.stop()
metrics = {
'current-connections': 10,
'max-job-size': 65535,
'cmd-release': 0,
'cmd-reserve': 4386,
'pid': 23703,
'cmd-bury': 0,
'current-producers': 0,
'total-jobs': 4331,
'current-jobs-ready': 0,
'cmd-peek-buried': 0,
'current-tubes': 7,
'current-jobs-delayed': 0,
'uptime': 182954,
'cmd-watch': 55,
'job-timeouts': 0,
'cmd-stats': 1,
'rusage-stime': 295.970497,
'current-jobs-reserved': 0,
'current-jobs-buried': 0,
'cmd-reserve-with-timeout': 0,
'cmd-put': 4331,
'cmd-pause-tube': 0,
'cmd-list-tubes-watched': 0,
'cmd-list-tubes': 0,
'current-workers': 9,
'cmd-list-tube-used': 0,
'cmd-ignore': 0,
'binlog-records-migrated': 0,
'current-waiting': 9,
'cmd-peek': 0,
'cmd-peek-ready': 0,
'cmd-peek-delayed': 0,
'cmd-touch': 0,
'binlog-oldest-index': 0,
'binlog-current-index': 0,
'cmd-use': 4321,
'total-connections': 4387,
'cmd-delete': 4331,
'binlog-max-size': 10485760,
'cmd-stats-job': 0,
'rusage-utime': 125.92787,
'cmd-stats-tube': 0,
'binlog-records-written': 0,
'cmd-kick': 0,
'current-jobs-urgent': 0,
'tubes.default.current-jobs-delayed': 0,
'tubes.default.pause': 0,
'tubes.default.cmd-pause-tube': 0,
'tubes.default.current-jobs-buried': 0,
'tubes.default.cmd-delete': 10,
'tubes.default.pause-time-left': 0,
'tubes.default.current-waiting': 9,
'tubes.default.current-jobs-ready': 0,
'tubes.default.total-jobs': 10,
'tubes.default.current-watching': 10,
'tubes.default.current-jobs-reserved': 0,
'tubes.default.current-using': 10,
'tubes.default.current-jobs-urgent': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
| mit | 2,683,888,722,091,763,000 | 33.775401 | 80 | 0.448716 | false |
jtoppins/beaker | SchemaUpgrades/upgrade_0.6.8_system_status_duration.py | 2 | 2652 | #!/usr/bin/python
import datetime
from sqlalchemy import and_
from turbogears.database import session
from bkr.server.util import load_config
from bkr.server.model import System, SystemStatus, SystemActivity, \
SystemStatusDuration
from bkr.server.test.assertions import assert_durations_not_overlapping, \
assert_durations_contiguous
def get_status(value):
if value == u'Working':
value = u'Automated'
try:
return SystemStatus.by_id(int(value))
except ValueError:
return SystemStatus.by_name(value)
def populate_status_durations(system):
assert not system.status_durations
# We don't know what the original status was, so let's set it to None for
# now and see if we can figure it out next
start_time = system.date_added
status = None
for activity in SystemActivity.query().filter(and_(
SystemActivity.object == system,
SystemActivity.field_name.in_([u'Status', u'status_id']),
SystemActivity.action == u'Changed'))\
.order_by(SystemActivity.created):
# Some old records have activity before date_added, probably because
# the former is not in UTC
changed_at = max(system.date_added, activity.created)
# If this is the first status change, old_value might tell us what it
# was before
if status is None:
if activity.old_value:
status = get_status(activity.old_value)
else:
# As a fallback, assume systems always started out broken
status = get_status(u'Broken')
new_status = get_status(activity.new_value)
# If the duration was non-zero, let's record it
if changed_at > start_time and status != new_status:
system.status_durations.append(SystemStatusDuration(
status=status, start_time=start_time, finish_time=changed_at))
status = new_status
start_time = changed_at
if status is None:
status = get_status(u'Broken')
system.status_durations.append(SystemStatusDuration(
status=status, start_time=start_time, finish_time=None))
assert_durations_not_overlapping(system.status_durations)
assert_durations_contiguous(system.status_durations)
assert system.date_added == system.status_durations[0].start_time
if __name__ == '__main__':
load_config()
session.begin()
for system_id in [s.id for s in System.query()]:
system = System.query().get(system_id)
populate_status_durations(system)
session.flush()
session.clear()
session.commit()
| gpl-2.0 | -5,165,479,253,454,364,000 | 39.8 | 82 | 0.653846 | false |
odooindia/odoo | addons/base_action_rule/base_action_rule.py | 12 | 15745 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import time
import logging
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
DATE_RANGE_FUNCTION = {
'minutes': lambda interval: timedelta(minutes=interval),
'hour': lambda interval: timedelta(hours=interval),
'day': lambda interval: timedelta(days=interval),
    'month': lambda interval: relativedelta(months=interval),  # datetime.timedelta has no 'months' argument
False: lambda interval: timedelta(0),
}
def get_datetime(date_str):
'''Return a datetime from a date string or a datetime string'''
# complete date time if date_str contains only a date
if ' ' not in date_str:
date_str = date_str + " 00:00:00"
return datetime.strptime(date_str, DEFAULT_SERVER_DATETIME_FORMAT)
class base_action_rule(osv.osv):
""" Base Action Rules """
_name = 'base.action.rule'
_description = 'Action Rules'
_order = 'sequence'
_columns = {
'name': fields.char('Rule Name', required=True),
'model_id': fields.many2one('ir.model', 'Related Document Model',
required=True, domain=[('osv_memory', '=', False)]),
'model': fields.related('model_id', 'model', type="char", string='Model'),
'create_date': fields.datetime('Create Date', readonly=1),
'active': fields.boolean('Active',
help="When unchecked, the rule is hidden and will not be executed."),
'sequence': fields.integer('Sequence',
help="Gives the sequence order when displaying a list of rules."),
'kind': fields.selection(
[('on_create', 'On Creation'),
('on_write', 'On Update'),
('on_create_or_write', 'On Creation & Update'),
('on_time', 'Based on Timed Condition')],
string='When to Run'),
'trg_date_id': fields.many2one('ir.model.fields', string='Trigger Date',
help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
'trg_date_range': fields.integer('Delay after trigger date',
help="Delay after the trigger date." \
"You can put a negative number if you need a delay before the" \
"trigger date, like sending a reminder 15 minutes before a meeting."),
'trg_date_range_type': fields.selection([('minutes', 'Minutes'), ('hour', 'Hours'),
('day', 'Days'), ('month', 'Months')], 'Delay type'),
'trg_date_calendar_id': fields.many2one(
'resource.calendar', 'Use Calendar',
help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
ondelete='set null',
),
'act_user_id': fields.many2one('res.users', 'Set Responsible'),
'act_followers': fields.many2many("res.partner", string="Add Followers"),
'server_action_ids': fields.many2many('ir.actions.server', string='Server Actions',
domain="[('model_id', '=', model_id)]",
help="Examples: email reminders, call object service, etc."),
'filter_pre_id': fields.many2one('ir.filters', string='Before Update Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before the update of the record."),
'filter_id': fields.many2one('ir.filters', string='Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before executing the action rule."),
'last_run': fields.datetime('Last Run', readonly=1, copy=False),
}
_defaults = {
'active': True,
'trg_date_range_type': 'day',
}
def onchange_kind(self, cr, uid, ids, kind, context=None):
clear_fields = []
if kind in ['on_create', 'on_create_or_write']:
clear_fields = ['filter_pre_id', 'trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind in ['on_write', 'on_create_or_write']:
clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind == 'on_time':
clear_fields = ['filter_pre_id']
return {'value': dict.fromkeys(clear_fields, False)}
def _filter(self, cr, uid, action, action_filter, record_ids, context=None):
""" filter the list record_ids that satisfy the action filter """
if record_ids and action_filter:
assert action.model == action_filter.model_id, "Filter model different from action rule model"
model = self.pool[action_filter.model_id]
domain = [('id', 'in', record_ids)] + eval(action_filter.domain)
ctx = dict(context or {})
ctx.update(eval(action_filter.context))
record_ids = model.search(cr, uid, domain, context=ctx)
return record_ids
def _process(self, cr, uid, action, record_ids, context=None):
""" process the given action on the records """
model = self.pool[action.model_id.model]
# modify records
values = {}
if 'date_action_last' in model._all_columns:
values['date_action_last'] = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if action.act_user_id and 'user_id' in model._all_columns:
values['user_id'] = action.act_user_id.id
if values:
model.write(cr, uid, record_ids, values, context=context)
if action.act_followers and hasattr(model, 'message_subscribe'):
follower_ids = map(int, action.act_followers)
model.message_subscribe(cr, uid, record_ids, follower_ids, context=context)
# execute server actions
if action.server_action_ids:
server_action_ids = map(int, action.server_action_ids)
for record in model.browse(cr, uid, record_ids, context):
action_server_obj = self.pool.get('ir.actions.server')
ctx = dict(context, active_model=model._name, active_ids=[record.id], active_id=record.id)
action_server_obj.run(cr, uid, server_action_ids, context=ctx)
return True
def _register_hook(self, cr, ids=None):
""" Wrap the methods `create` and `write` of the models specified by
the rules given by `ids` (or all existing rules if `ids` is `None`.)
"""
updated = False
if ids is None:
ids = self.search(cr, SUPERUSER_ID, [])
for action_rule in self.browse(cr, SUPERUSER_ID, ids):
model = action_rule.model_id.model
model_obj = self.pool[model]
if not hasattr(model_obj, 'base_action_ruled'):
# monkey-patch methods create and write
def create(self, cr, uid, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return create.origin(self, cr, uid, vals, context=context)
# call original method with a modified context
context = dict(context or {}, action=True)
new_id = create.origin(self, cr, uid, vals, context=context, **kwargs)
# as it is a new record, we do not consider the actions that have a prefilter
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_create', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
# check postconditions, and execute actions on the records that satisfy them
for action in action_model.browse(cr, uid, action_ids, context=context):
if action_model._filter(cr, uid, action, action.filter_id, [new_id], context=context):
action_model._process(cr, uid, action, [new_id], context=context)
return new_id
def write(self, cr, uid, ids, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return write.origin(self, cr, uid, ids, vals, context=context)
# modify context
context = dict(context or {}, action=True)
ids = [ids] if isinstance(ids, (int, long, str)) else ids
# retrieve the action rules to possibly execute
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_write', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
actions = action_model.browse(cr, uid, action_ids, context=context)
# check preconditions
pre_ids = {}
for action in actions:
pre_ids[action] = action_model._filter(cr, uid, action, action.filter_pre_id, ids, context=context)
# call original method
write.origin(self, cr, uid, ids, vals, context=context, **kwargs)
# check postconditions, and execute actions on the records that satisfy them
for action in actions:
post_ids = action_model._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context)
if post_ids:
action_model._process(cr, uid, action, post_ids, context=context)
return True
model_obj._patch_method('create', create)
model_obj._patch_method('write', write)
model_obj.base_action_ruled = True
updated = True
return updated
def create(self, cr, uid, vals, context=None):
res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
if self._register_hook(cr, [res_id]):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return res_id
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
if self._register_hook(cr, ids):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return True
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
if model_id:
model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
data.update({'model': model.model})
return {'value': data}
def _check_delay(self, cr, uid, action, record, record_dt, context=None):
if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
start_dt = get_datetime(record_dt)
action_dt = self.pool['resource.calendar'].schedule_days_get_date(
cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
day_date=start_dt, compute_leaves=True, context=context
)
else:
delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
action_dt = get_datetime(record_dt) + delay
return action_dt
def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
""" This Function is called by scheduler. """
context = context or {}
# retrieve all the action rules to run based on a timed condition
action_dom = [('kind', '=', 'on_time')]
action_ids = self.search(cr, uid, action_dom, context=context)
for action in self.browse(cr, uid, action_ids, context=context):
now = datetime.now()
if action.last_run:
last_run = get_datetime(action.last_run)
else:
last_run = datetime.utcfromtimestamp(0)
# retrieve all the records that satisfy the action's condition
model = self.pool[action.model_id.model]
domain = []
ctx = dict(context)
if action.filter_id:
domain = eval(action.filter_id.domain)
ctx.update(eval(action.filter_id.context))
if 'lang' not in ctx:
# Filters might be language-sensitive, attempt to reuse creator lang
# as we are usually running this as super-user in background
[filter_meta] = action.filter_id.get_metadata()
user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
filter_meta['create_uid'][0]
ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
record_ids = model.search(cr, uid, domain, context=ctx)
# determine when action should occur for the records
date_field = action.trg_date_id.name
if date_field == 'date_action_last' and 'create_date' in model._all_columns:
get_record_dt = lambda record: record[date_field] or record.create_date
else:
get_record_dt = lambda record: record[date_field]
# process action on the records that should be executed
for record in model.browse(cr, uid, record_ids, context=context):
record_dt = get_record_dt(record)
if not record_dt:
continue
action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
if last_run <= action_dt < now:
try:
context = dict(context or {}, action=True)
self._process(cr, uid, action, [record.id], context=context)
except Exception:
import traceback
_logger.error(traceback.format_exc())
action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
if automatic:
# auto-commit for batch processing
cr.commit()
| agpl-3.0 | 6,269,191,399,835,874,000 | 49.143312 | 155 | 0.574913 | false |
Yannig/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_inventory.py | 15 | 5168 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_inventory
version_added: "2.3"
author: "Wayne Witzel III (@wwitzel3)"
short_description: create, update, or destroy Ansible Tower inventory.
description:
- Create, update, or destroy Ansible Tower inventories. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the inventory.
required: True
description:
description:
- The description to use for the inventory.
required: False
default: null
organization:
description:
- Organization the inventory belongs to.
required: True
variables:
description:
- Inventory variables. Use '@' to get from file.
required: False
default: null
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
tower_host:
description:
- URL to your Tower instance.
required: False
default: null
tower_username:
description:
- Username for your Tower instance.
required: False
default: null
tower_password:
description:
- Password for your Tower instance.
required: False
default: null
tower_verify_ssl:
description:
- Dis/allow insecure connections to Tower. If C(no), SSL certificates will not be validated.
This should only be used on personally controlled sites using self-signed certificates.
required: False
default: True
tower_config_file:
description:
- Path to the Tower config file. See notes.
required: False
default: null
requirements:
- "python >= 2.6"
- "ansible-tower-cli >= 3.0.3"
notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library
defaults to find your Tower host information.
- I(config_file) should contain Tower configuration in the following format
host=hostname
username=username
password=password
'''
EXAMPLES = '''
- name: Add tower inventory
tower_inventory:
name: "Foo Inventory"
description: "Our Foo Cloud Servers"
organization: "Bar Org"
state: present
tower_config_file: "~/tower_cli.cfg"
'''
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
description=dict(),
organization=dict(required=True),
variables=dict(),
tower_host=dict(),
tower_username=dict(),
tower_password=dict(no_log=True),
tower_verify_ssl=dict(type='bool', default=True),
tower_config_file=dict(type='path'),
state=dict(choices=['present', 'absent'], default='present'),
),
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
description = module.params.get('description')
organization = module.params.get('organization')
variables = module.params.get('variables')
state = module.params.get('state')
json_output = {'inventory': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
inventory = tower_cli.get_resource('inventory')
try:
org_res = tower_cli.get_resource('organization')
org = org_res.get(name=organization)
if state == 'present':
result = inventory.modify(name=name, organization=org['id'], variables=variables,
description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = inventory.delete(name=name, organization=org['id'])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update inventory, organization not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update inventory: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| gpl-3.0 | 3,951,237,933,088,589,000 | 29.579882 | 122 | 0.625967 | false |
nicebug/SuckerTest | web/wsgiserver/ssl_pyopenssl.py | 177 | 9605 | """A library for integrating pyOpenSSL with CherryPy.
The OpenSSL module must be importable for SSL functionality.
You can obtain it from http://pyopenssl.sourceforge.net/
To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of
SSLAdapter. There are two ways to use SSL:
Method One
----------
* ``ssl_adapter.context``: an instance of SSL.Context.
If this is not None, it is assumed to be an SSL.Context instance,
and will be passed to SSL.Connection on bind(). The developer is
responsible for forming a valid Context object. This approach is
to be preferred for more flexibility, e.g. if the cert and key are
streams instead of files, or need decryption, or SSL.SSLv3_METHOD
is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
the pyOpenSSL documentation for complete options.
Method Two (shortcut)
---------------------
* ``ssl_adapter.certificate``: the filename of the server SSL certificate.
* ``ssl_adapter.private_key``: the filename of the server's private key file.
Both are None by default. If ssl_adapter.context is None, but .private_key
and .certificate are both given and valid, they will be read, and the
context will be automatically created from them.
"""
import socket
import threading
import time
from cherrypy import wsgiserver
try:
from OpenSSL import SSL
from OpenSSL import crypto
except ImportError:
SSL = None
class SSL_fileobject(wsgiserver.CP_fileobject):
"""SSL file object attached to a socket object."""
ssl_timeout = 3
ssl_retry = .01
def _safe_call(self, is_reader, call, *args, **kwargs):
"""Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
errnum = e.args[0]
if is_reader and errnum in wsgiserver.socket_errors_to_ignore:
return ""
raise socket.error(errnum)
except SSL.Error, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except IndexError:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise wsgiserver.NoSSLError()
raise wsgiserver.FatalSSLAlert(*e.args)
except:
raise
if time.time() - start > self.ssl_timeout:
raise socket.timeout("timed out")
def recv(self, *args, **kwargs):
buf = []
r = super(SSL_fileobject, self).recv
while True:
data = self._safe_call(True, r, *args, **kwargs)
buf.append(data)
p = self._sock.pending()
if not p:
return "".join(buf)
def sendall(self, *args, **kwargs):
return self._safe_call(False, super(SSL_fileobject, self).sendall,
*args, **kwargs)
def send(self, *args, **kwargs):
return self._safe_call(False, super(SSL_fileobject, self).send,
*args, **kwargs)
class SSLConnection:
"""A thread-safe wrapper for an SSL.Connection.
``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``.
"""
def __init__(self, *args):
self._ssl_conn = SSL.Connection(*args)
self._lock = threading.RLock()
for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
'renegotiate', 'bind', 'listen', 'connect', 'accept',
'setblocking', 'fileno', 'close', 'get_cipher_list',
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
'makefile', 'get_app_data', 'set_app_data', 'state_string',
'sock_shutdown', 'get_peer_certificate', 'want_read',
'want_write', 'set_connect_state', 'set_accept_state',
'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
exec("""def %s(self, *args):
self._lock.acquire()
try:
return self._ssl_conn.%s(*args)
finally:
self._lock.release()
""" % (f, f))
def shutdown(self, *args):
self._lock.acquire()
try:
# pyOpenSSL.socket.shutdown takes no args
return self._ssl_conn.shutdown()
finally:
self._lock.release()
class pyOpenSSLAdapter(wsgiserver.SSLAdapter):
"""A wrapper for integrating pyOpenSSL with CherryPy."""
context = None
"""An instance of SSL.Context."""
certificate = None
"""The filename of the server SSL certificate."""
private_key = None
"""The filename of the server's private key file."""
certificate_chain = None
"""Optional. The filename of CA's intermediate certificate bundle.
This is needed for cheaper "chained root" SSL certificates, and should be
left as None if not required."""
def __init__(self, certificate, private_key, certificate_chain=None):
if SSL is None:
raise ImportError("You must install pyOpenSSL to use HTTPS.")
self.context = None
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
self._environ = None
def bind(self, sock):
"""Wrap and return the given socket."""
if self.context is None:
self.context = self.get_context()
conn = SSLConnection(self.context, sock)
self._environ = self.get_environ()
return conn
def wrap(self, sock):
"""Wrap and return the given socket, plus WSGI environ entries."""
return sock, self._environ.copy()
def get_context(self):
"""Return an SSL.Context from self attributes."""
# See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
c = SSL.Context(SSL.SSLv23_METHOD)
c.use_privatekey_file(self.private_key)
if self.certificate_chain:
c.load_verify_locations(self.certificate_chain)
c.use_certificate_file(self.certificate)
return c
def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
ssl_environ = {
"HTTPS": "on",
# pyOpenSSL doesn't provide access to any of these AFAICT
## 'SSL_PROTOCOL': 'SSLv2',
## SSL_CIPHER string The cipher specification name
## SSL_VERSION_INTERFACE string The mod_ssl program version
## SSL_VERSION_LIBRARY string The OpenSSL program version
}
if self.certificate:
# Server certificate attributes
cert = open(self.certificate, 'rb').read()
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ssl_environ.update({
'SSL_SERVER_M_VERSION': cert.get_version(),
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
## 'SSL_SERVER_V_START': Validity of server's certificate (start time),
## 'SSL_SERVER_V_END': Validity of server's certificate (end time),
})
for prefix, dn in [("I", cert.get_issuer()),
("S", cert.get_subject())]:
# X509Name objects don't seem to have a way to get the
# complete DN string. Use str() and slice it instead,
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
dnstr = str(dn)[18:-2]
wsgikey = 'SSL_SERVER_%s_DN' % prefix
ssl_environ[wsgikey] = dnstr
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
# for any value to contain slashes itself (in a URL).
while dnstr:
pos = dnstr.rfind("=")
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
pos = dnstr.rfind("/")
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
if key and value:
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
ssl_environ[wsgikey] = value
return ssl_environ
def makefile(self, sock, mode='r', bufsize=-1):
if SSL and isinstance(sock, SSL.ConnectionType):
timeout = sock.gettimeout()
f = SSL_fileobject(sock, mode, bufsize)
f.ssl_timeout = timeout
return f
else:
return wsgiserver.CP_fileobject(sock, mode, bufsize)
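# A minimal usage sketch, assuming the bundled wsgiserver exposes an
# ssl_adapter attribute as in recent CherryPy releases; the bind address,
# certificate/key file names and the wsgi_app callable are placeholders.
if __name__ == '__main__':
    def wsgi_app(environ, start_response):
        # Tiny WSGI application used only for this demonstration.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['hello over TLS\n']
    demo_server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8443), wsgi_app)
    demo_server.ssl_adapter = pyOpenSSLAdapter('server.crt', 'server.key')
    try:
        demo_server.start()
    except KeyboardInterrupt:
        demo_server.stop()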
| mit | -2,352,369,593,103,696,000 | 36.519531 | 86 | 0.5595 | false |
scorphus/passpie | passpie/database.py | 2 | 4638 | from datetime import datetime
import logging
import os
import shutil
from tinydb import TinyDB, Storage, where, Query
import yaml
from .utils import mkdir_open
from .history import Repository
from .credential import split_fullname, make_fullname
class PasspieStorage(Storage):
extension = ".pass"
def __init__(self, path):
super(PasspieStorage, self).__init__()
self.path = path
def make_credpath(self, name, login):
dirname, filename = name, login + self.extension
credpath = os.path.join(self.path, dirname, filename)
return credpath
def delete(self, credentials):
for cred in credentials:
credpath = self.make_credpath(cred["name"], cred["login"])
os.remove(credpath)
if not os.listdir(os.path.dirname(credpath)):
shutil.rmtree(os.path.dirname(credpath))
def read(self):
elements = []
for rootdir, dirs, files in os.walk(self.path):
filenames = [f for f in files if f.endswith(self.extension)]
for filename in filenames:
docpath = os.path.join(rootdir, filename)
with open(docpath) as f:
elements.append(yaml.load(f.read()))
return {"_default":
{idx: elem for idx, elem in enumerate(elements, start=1)}}
def write(self, data):
deleted = [c for c in self.read()["_default"].values()
if c not in data["_default"].values()]
self.delete(deleted)
for eid, cred in data["_default"].items():
credpath = self.make_credpath(cred["name"], cred["login"])
with mkdir_open(credpath, "w") as f:
f.write(yaml.safe_dump(dict(cred), default_flow_style=False))
class Database(TinyDB):
def __init__(self, config, storage=PasspieStorage):
self.config = config
self.path = config['path']
self.repo = Repository(self.path,
autopull=config.get('autopull'),
autopush=config.get('autopush'))
PasspieStorage.extension = config['extension']
super(Database, self).__init__(self.path, storage=storage)
def has_keys(self):
return os.path.exists(os.path.join(self.path, '.keys'))
def filename(self, fullname):
login, name = split_fullname(fullname)
return self._storage.make_credpath(name=name, login=login)
def credential(self, fullname):
login, name = split_fullname(fullname)
Credential = Query()
if login is None:
creds = self.get(Credential.name == name)
else:
creds = self.get((Credential.login == login) & (Credential.name == name))
return creds
def add(self, fullname, password, comment):
login, name = split_fullname(fullname)
if login is None:
logging.error('Cannot add credential with empty login. use "@<name>" syntax')
return None
credential = dict(fullname=fullname,
name=name,
login=login,
password=password,
comment=comment,
modified=datetime.now())
self.insert(credential)
return credential
def update(self, fullname, values):
login, name = split_fullname(fullname)
values['fullname'] = make_fullname(values["login"], values["name"])
values['modified'] = datetime.now()
Credential = Query()
if login is None:
query = (Credential.name == name)
else:
query = ((Credential.login == login) & (Credential.name == name))
self.table().update(values, query)
def credentials(self, fullname=None):
if fullname:
login, name = split_fullname(fullname)
Credential = Query()
if login is None:
creds = self.search(Credential.name == name)
else:
creds = self.search((Credential.login == login) & (Credential.name == name))
else:
creds = self.all()
return sorted(creds, key=lambda x: x["name"] + x["login"])
def remove(self, fullname):
self.table().remove(where('fullname') == fullname)
def matches(self, regex):
Credential = Query()
credentials = self.search(
Credential.name.matches(regex) |
Credential.login.matches(regex) |
Credential.comment.matches(regex)
)
return sorted(credentials, key=lambda x: x["name"] + x["login"])
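# A minimal usage sketch; the config values and the credential below are
# placeholders, and git-backed sync (autopull/autopush) is left off here.
if __name__ == '__main__':
    demo_config = {
        'path': os.path.expanduser('~/.passpie'),
        'extension': '.pass',
        'autopull': None,
        'autopush': None,
    }
    db = Database(demo_config)
    db.add('alice@example.com', password='s3cret', comment='demo entry')
    for cred in db.credentials():
        print(cred['fullname'])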
| mit | -3,531,396,421,367,122,000 | 34.676923 | 92 | 0.57417 | false |
bharatsingh430/py-R-FCN-multiGPU | lib/datasets/voc_eval.py | 12 | 6938 | # --------------------------------------------------------
# Fast/er R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Bharath Hariharan
# --------------------------------------------------------
import xml.etree.ElementTree as ET
import os
import cPickle
import numpy as np
def parse_rec(filename):
""" Parse a PASCAL VOC xml file """
tree = ET.parse(filename)
objects = []
for obj in tree.findall('object'):
obj_struct = {}
obj_struct['name'] = obj.find('name').text
obj_struct['pose'] = obj.find('pose').text
obj_struct['truncated'] = int(obj.find('truncated').text)
obj_struct['difficult'] = int(obj.find('difficult').text)
bbox = obj.find('bndbox')
obj_struct['bbox'] = [int(bbox.find('xmin').text),
int(bbox.find('ymin').text),
int(bbox.find('xmax').text),
int(bbox.find('ymax').text)]
objects.append(obj_struct)
return objects
def voc_ap(rec, prec, use_07_metric=False):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
VOC 07 11 point method (default:False).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
def voc_eval(detpath,
annopath,
imagesetfile,
classname,
cachedir,
ovthresh=0.5,
use_07_metric=False):
"""rec, prec, ap = voc_eval(detpath,
annopath,
imagesetfile,
classname,
[ovthresh],
[use_07_metric])
Top level function that does the PASCAL VOC evaluation.
detpath: Path to detections
detpath.format(classname) should produce the detection results file.
annopath: Path to annotations
annopath.format(imagename) should be the xml annotations file.
imagesetfile: Text file containing the list of images, one image per line.
classname: Category name (duh)
cachedir: Directory for caching the annotations
[ovthresh]: Overlap threshold (default = 0.5)
[use_07_metric]: Whether to use VOC07's 11 point AP computation
(default False)
"""
# assumes detections are in detpath.format(classname)
# assumes annotations are in annopath.format(imagename)
# assumes imagesetfile is a text file with each line an image name
# cachedir caches the annotations in a pickle file
# first load gt
if not os.path.isdir(cachedir):
os.mkdir(cachedir)
cachefile = os.path.join(cachedir, 'annots.pkl')
# read list of images
with open(imagesetfile, 'r') as f:
lines = f.readlines()
imagenames = [x.strip() for x in lines]
if not os.path.isfile(cachefile):
# load annots
recs = {}
for i, imagename in enumerate(imagenames):
recs[imagename] = parse_rec(annopath.format(imagename))
if i % 100 == 0:
print 'Reading annotation for {:d}/{:d}'.format(
i + 1, len(imagenames))
# save
print 'Saving cached annotations to {:s}'.format(cachefile)
with open(cachefile, 'w') as f:
cPickle.dump(recs, f)
else:
# load
with open(cachefile, 'r') as f:
recs = cPickle.load(f)
# extract gt objects for this class
class_recs = {}
npos = 0
for imagename in imagenames:
R = [obj for obj in recs[imagename] if obj['name'] == classname]
bbox = np.array([x['bbox'] for x in R])
difficult = np.array([x['difficult'] for x in R]).astype(np.bool)
det = [False] * len(R)
npos = npos + sum(~difficult)
class_recs[imagename] = {'bbox': bbox,
'difficult': difficult,
'det': det}
# read dets
detfile = detpath.format(classname)
with open(detfile, 'r') as f:
lines = f.readlines()
splitlines = [x.strip().split(' ') for x in lines]
image_ids = [x[0] for x in splitlines]
confidence = np.array([float(x[1]) for x in splitlines])
BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
# sort by confidence
sorted_ind = np.argsort(-confidence)
sorted_scores = np.sort(-confidence)
BB = BB[sorted_ind, :]
image_ids = [image_ids[x] for x in sorted_ind]
# go down dets and mark TPs and FPs
nd = len(image_ids)
tp = np.zeros(nd)
fp = np.zeros(nd)
for d in range(nd):
R = class_recs[image_ids[d]]
bb = BB[d, :].astype(float)
ovmax = -np.inf
BBGT = R['bbox'].astype(float)
if BBGT.size > 0:
# compute overlaps
# intersection
ixmin = np.maximum(BBGT[:, 0], bb[0])
iymin = np.maximum(BBGT[:, 1], bb[1])
ixmax = np.minimum(BBGT[:, 2], bb[2])
iymax = np.minimum(BBGT[:, 3], bb[3])
iw = np.maximum(ixmax - ixmin + 1., 0.)
ih = np.maximum(iymax - iymin + 1., 0.)
inters = iw * ih
# union
uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) +
(BBGT[:, 2] - BBGT[:, 0] + 1.) *
(BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)
overlaps = inters / uni
ovmax = np.max(overlaps)
jmax = np.argmax(overlaps)
if ovmax > ovthresh:
if not R['difficult'][jmax]:
if not R['det'][jmax]:
tp[d] = 1.
R['det'][jmax] = 1
else:
fp[d] = 1.
else:
fp[d] = 1.
# compute precision recall
fp = np.cumsum(fp)
tp = np.cumsum(tp)
rec = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
ap = voc_ap(rec, prec, use_07_metric)
return rec, prec, ap
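# A minimal usage sketch, assuming the standard VOCdevkit directory layout;
# every path below, and the detection-results naming scheme, are placeholders
# to be adapted to the actual experiment.
if __name__ == '__main__':
    devkit = 'VOCdevkit/VOC2007'
    rec, prec, ap = voc_eval(
        detpath='results/comp4_det_test_{:s}.txt',
        annopath=devkit + '/Annotations/{:s}.xml',
        imagesetfile=devkit + '/ImageSets/Main/test.txt',
        classname='car',
        cachedir='annotations_cache',
        ovthresh=0.5,
        use_07_metric=True)
    print 'car AP = {:.4f}'.format(ap)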
| mit | -5,088,211,684,464,472,000 | 33.69 | 78 | 0.518882 | false |
SnappleCap/oh-mainline | vendor/packages/gdata/src/gdata/alt/appengine.py | 77 | 11666 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides HTTP functions for gdata.service to use on Google App Engine
AppEngineHttpClient: Provides an HTTP request method which uses App Engine's
urlfetch API. Set the http_client member of a GDataService object to an
instance of an AppEngineHttpClient to allow the gdata library to run on
Google App Engine.
run_on_appengine: Function which will modify an existing GDataService object
to allow it to run on App Engine. It works by creating a new instance of
the AppEngineHttpClient and replacing the GDataService object's
http_client.
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import StringIO
import pickle
import atom.http_interface
import atom.token_store
from google.appengine.api import urlfetch
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.api import memcache
def run_on_appengine(gdata_service, store_tokens=True,
single_user_mode=False, deadline=None):
"""Modifies a GDataService object to allow it to run on App Engine.
Args:
gdata_service: An instance of AtomService, GDataService, or any
of their subclasses which has an http_client member and a
token_store member.
store_tokens: Boolean, defaults to True. If True, the gdata_service
will attempt to add each token to its token_store when
SetClientLoginToken or SetAuthSubToken is called. If False
the tokens will not automatically be added to the
token_store.
single_user_mode: Boolean, defaults to False. If True, the current_token
member of gdata_service will be set when
SetClientLoginToken or SetAuthSubToken is called. If set
to True, the current_token is set in the gdata_service
and anyone who accesses the object will use the same
token.
Note: If store_tokens is set to False and
single_user_mode is set to False, all tokens will be
ignored, since the library assumes: the tokens should not
be stored in the datastore and they should not be stored
in the gdata_service object. This will make it
impossible to make requests which require authorization.
deadline: int (optional) The number of seconds to wait for a response
before timing out on the HTTP request. If no deadline is
specified, the default deadline for HTTP requests from App
Engine is used. The maximum is currently 10 (for 10 seconds).
The default deadline for App Engine is 5 seconds.
"""
gdata_service.http_client = AppEngineHttpClient(deadline=deadline)
gdata_service.token_store = AppEngineTokenStore()
gdata_service.auto_store_tokens = store_tokens
gdata_service.auto_set_current_token = single_user_mode
return gdata_service
class AppEngineHttpClient(atom.http_interface.GenericHttpClient):
def __init__(self, headers=None, deadline=None):
self.debug = False
self.headers = headers or {}
self.deadline = deadline
def request(self, operation, url, data=None, headers=None):
"""Performs an HTTP call to the server, supports GET, POST, PUT, and
DELETE.
Usage example, perform an HTTP GET on http://www.google.com/:
import atom.http
client = atom.http.HttpClient()
http_response = client.request('GET', 'http://www.google.com/')
Args:
operation: str The HTTP operation to be performed. This is usually one
of 'GET', 'POST', 'PUT', or 'DELETE'
data: filestream, list of parts, or other object which can be converted
to a string. Should be set to None when performing a GET or DELETE.
If data is a file-like object which can be read, this method will
read a chunk of 100K bytes at a time and send them.
If the data is a list of parts to be sent, each part will be
evaluated and sent.
url: The full URL to which the request should be sent. Can be a string
or atom.url.Url.
headers: dict of strings. HTTP headers which should be sent
in the request.
"""
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
# Construct the full payload.
# Assume that data is None or a string.
data_str = data
if data:
if isinstance(data, list):
# If data is a list of different objects, convert them all to strings
# and join them together.
converted_parts = [_convert_data_part(x) for x in data]
data_str = ''.join(converted_parts)
else:
data_str = _convert_data_part(data)
# If the list of headers does not include a Content-Length, attempt to
# calculate it based on the data object.
if data and 'Content-Length' not in all_headers:
all_headers['Content-Length'] = str(len(data_str))
# Set the content type to the default value if none was set.
if 'Content-Type' not in all_headers:
all_headers['Content-Type'] = 'application/atom+xml'
# Lookup the urlfetch operation which corresponds to the desired HTTP verb.
if operation == 'GET':
method = urlfetch.GET
elif operation == 'POST':
method = urlfetch.POST
elif operation == 'PUT':
method = urlfetch.PUT
elif operation == 'DELETE':
method = urlfetch.DELETE
else:
method = None
if self.deadline is None:
return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
method=method, headers=all_headers, follow_redirects=False))
return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
method=method, headers=all_headers, follow_redirects=False,
deadline=self.deadline))
def _convert_data_part(data):
if not data or isinstance(data, str):
return data
elif hasattr(data, 'read'):
# data is a file like object, so read it completely.
return data.read()
# The data object was not a file.
# Try to convert to a string and send the data.
return str(data)
class HttpResponse(object):
"""Translates a urlfetch resoinse to look like an hhtplib resoinse.
Used to allow the resoinse from HttpRequest to be usable by gdata.service
methods.
"""
def __init__(self, urlfetch_response):
self.body = StringIO.StringIO(urlfetch_response.content)
self.headers = urlfetch_response.headers
self.status = urlfetch_response.status_code
self.reason = ''
def read(self, length=None):
if not length:
return self.body.read()
else:
return self.body.read(length)
def getheader(self, name):
if not self.headers.has_key(name):
return self.headers[name.lower()]
return self.headers[name]
class TokenCollection(db.Model):
"""Datastore Model which associates auth tokens with the current user."""
user = db.UserProperty()
pickled_tokens = db.BlobProperty()
class AppEngineTokenStore(atom.token_store.TokenStore):
"""Stores the user's auth tokens in the App Engine datastore.
Tokens are only written to the datastore if a user is signed in (if
users.get_current_user() returns a user object).
"""
def __init__(self):
self.user = None
def add_token(self, token):
"""Associates the token with the current user and stores it.
If there is no current user, the token will not be stored.
Returns:
False if the token was not stored.
"""
tokens = load_auth_tokens(self.user)
if not hasattr(token, 'scopes') or not token.scopes:
return False
for scope in token.scopes:
tokens[str(scope)] = token
key = save_auth_tokens(tokens, self.user)
if key:
return True
return False
def find_token(self, url):
"""Searches the current user's collection of token for a token which can
be used for a request to the url.
Returns:
The stored token which belongs to the current user and is valid for the
desired URL. If there is no current user, or there is no valid user
token in the datastore, a atom.http_interface.GenericToken is returned.
"""
if url is None:
return None
if isinstance(url, (str, unicode)):
url = atom.url.parse_url(url)
tokens = load_auth_tokens(self.user)
if url in tokens:
token = tokens[url]
if token.valid_for_scope(url):
return token
else:
del tokens[url]
save_auth_tokens(tokens, self.user)
for scope, token in tokens.iteritems():
if token.valid_for_scope(url):
return token
return atom.http_interface.GenericToken()
def remove_token(self, token):
"""Removes the token from the current user's collection in the datastore.
Returns:
False if the token was not removed, this could be because the token was
not in the datastore, or because there is no current user.
"""
token_found = False
scopes_to_delete = []
tokens = load_auth_tokens(self.user)
for scope, stored_token in tokens.iteritems():
if stored_token == token:
scopes_to_delete.append(scope)
token_found = True
for scope in scopes_to_delete:
del tokens[scope]
if token_found:
save_auth_tokens(tokens, self.user)
return token_found
def remove_all_tokens(self):
"""Removes all of the current user's tokens from the datastore."""
save_auth_tokens({}, self.user)
def save_auth_tokens(token_dict, user=None):
"""Associates the tokens with the current user and writes to the datastore.
If there is no current user, the tokens are not written and this function
returns None.
Returns:
The key of the datastore entity containing the user's tokens, or None if
there was no current user.
"""
if user is None:
user = users.get_current_user()
if user is None:
return None
memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
user_tokens = TokenCollection.all().filter('user =', user).get()
if user_tokens:
user_tokens.pickled_tokens = pickle.dumps(token_dict)
return user_tokens.put()
else:
user_tokens = TokenCollection(
user=user,
pickled_tokens=pickle.dumps(token_dict))
return user_tokens.put()
def load_auth_tokens(user=None):
"""Reads a dictionary of the current user's tokens from the datastore.
If there is no current user (a user is not signed in to the app) or the user
does not have any tokens, an empty dictionary is returned.
"""
if user is None:
user = users.get_current_user()
if user is None:
return {}
pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
if pickled_tokens:
return pickle.loads(pickled_tokens)
user_tokens = TokenCollection.all().filter('user =', user).get()
if user_tokens:
memcache.set('gdata_pickled_tokens:%s' % user, user_tokens.pickled_tokens)
return pickle.loads(user_tokens.pickled_tokens)
return {}
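# A minimal usage sketch; the Calendar client is only an illustrative choice,
# any GDataService subclass with http_client and token_store members can be
# adapted the same way. The __main__ guard keeps it from running on import.
if __name__ == '__main__':
    import gdata.calendar.service
    client = gdata.calendar.service.CalendarService()
    run_on_appengine(client, store_tokens=True, single_user_mode=False,
                     deadline=10)
    # client.http_client now issues requests through urlfetch, and tokens set
    # via SetClientLoginToken/SetAuthSubToken are stored per signed-in user.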
| agpl-3.0 | 8,934,527,723,085,592,000 | 35.342679 | 79 | 0.674781 | false |
JKatzwinkel/mps-youtube | mps_youtube/c.py | 1 | 1141 | """ Module for holding colour code values. """
import os
import re
import sys
try:
# pylint: disable=F0401
from colorama import Fore, Style
has_colorama = True
except ImportError:
has_colorama = False
mswin = os.name == "nt"
if mswin and has_colorama:
white = Style.RESET_ALL
ul = Style.DIM + Fore.YELLOW
red, green, yellow = Fore.RED, Fore.GREEN, Fore.YELLOW
blue, pink = Fore.CYAN, Fore.MAGENTA
elif mswin:
ul = red = green = yellow = blue = pink = white = ""
elif sys.stdout.isatty():
white = "\x1b[%sm" % 0
ul = "\x1b[%sm" * 3 % (2, 4, 33)
cols = ["\x1b[%sm" % n for n in range(91, 96)]
red, green, yellow, blue, pink = cols
else:
ul = red = green = yellow = blue = pink = white = ""
r, g, y, b, p, w = red, green, yellow, blue, pink, white
ansirx = re.compile(r'\x1b\[\d*m', re.UNICODE)
def c(colour, text):
""" Return coloured text. """
colours = {'r': r, 'g': g, 'y': y, 'b':b, 'p':p}
return colours[colour] + text + w
def charcount(s):
""" Return number of characters in string, with ANSI color codes excluded. """
return len(ansirx.sub('', s))
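# A quick usage sketch: colourise two fragments and measure the printable
# width; both helpers are defined above, so nothing external is assumed.
if __name__ == '__main__':
    banner = c('g', 'ok') + " / " + c('r', 'failed')
    print(banner)
    print(charcount(banner))  # counts characters, ANSI colour codes excluded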
| gpl-3.0 | -1,001,906,387,397,352,100 | 23.804348 | 82 | 0.597721 | false |
b0ttl3z/SickRage | lib/twilio/rest/resources/recordings.py | 23 | 1755 | from .util import normalize_dates
from .transcriptions import Transcriptions
from .base import InstanceResource, ListResource
class Recording(InstanceResource):
subresources = [Transcriptions]
def __init__(self, *args, **kwargs):
super(Recording, self).__init__(*args, **kwargs)
self.formats = {
"mp3": self.uri + ".mp3",
"wav": self.uri + ".wav",
}
def delete(self):
"""
Delete this recording
"""
return self.delete_instance()
class Recordings(ListResource):
name = "Recordings"
instance = Recording
@normalize_dates
def list(self, before=None, after=None, **kwargs):
"""
Returns a page of :class:`Recording` resources as a list.
For paging information see :class:`ListResource`.
:param date after: Only list recordings logged after this datetime
:param date before: Only list recordings logged before this datetime
:param call_sid: Only list recordings from this :class:`Call`
"""
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return self.get_instances(kwargs)
@normalize_dates
def iter(self, before=None, after=None, **kwargs):
"""
Returns an iterator of :class:`Recording` resources.
:param date after: Only list recordings logged after this datetime
:param date before: Only list recordings logged before this datetime
"""
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return super(Recordings, self).iter(**kwargs)
def delete(self, sid):
"""
Delete the given recording
"""
return self.delete_instance(sid)
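# A minimal usage sketch, assuming access through the usual TwilioRestClient
# entry point; the account SID, auth token and call SID are placeholders.
if __name__ == '__main__':
    from twilio.rest import TwilioRestClient
    client = TwilioRestClient('ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'your_auth_token')
    for recording in client.recordings.list(call_sid='CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'):
        print(recording.sid)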
| gpl-3.0 | 6,504,293,572,577,997,000 | 28.25 | 76 | 0.617664 | false |
mhbu50/erpnext | erpnext/crm/report/lead_owner_efficiency/lead_owner_efficiency.py | 3 | 1463 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from erpnext.crm.report.campaign_efficiency.campaign_efficiency import get_lead_data
def execute(filters=None):
columns, data = [], []
columns=get_columns()
data=get_lead_data(filters, "Lead Owner")
return columns, data
def get_columns():
return [
{
"fieldname": "lead_owner",
"label": _("Lead Owner"),
"fieldtype": "Link",
"options": "User",
"width": "130"
},
{
"fieldname": "lead_count",
"label": _("Lead Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "opp_count",
"label": _("Opp Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "quot_count",
"label": _("Quot Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "order_count",
"label": _("Order Count"),
"fieldtype": "Int",
"width": "100"
},
{
"fieldname": "order_value",
"label": _("Order Value"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "opp_lead",
"label": _("Opp/Lead %"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "quot_lead",
"label": _("Quot/Lead %"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "order_quot",
"label": _("Order/Quot %"),
"fieldtype": "Float",
"width": "100"
}
]
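# A minimal usage sketch for a bench console session; the site name and the
# filter keys are assumptions, execute() simply forwards whatever filters it
# receives to get_lead_data.
if __name__ == '__main__':
    frappe.init(site='site1.local')
    frappe.connect()
    columns, data = execute({"from_date": "2021-01-01", "to_date": "2021-12-31"})
    print(len(data))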
| gpl-3.0 | -2,346,251,921,959,471,600 | 19.319444 | 84 | 0.561859 | false |
boshnivolo/TIY-Assignments | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 1841 | 3207 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
def setUp(self):
self.nodes = {}
for x in ('a', 'b', 'c', 'd', 'e'):
self.nodes[x] = gyp.input.DependencyGraphNode(x)
def _create_dependency(self, dependent, dependency):
dependent.dependencies.append(dependency)
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['b'])
cycles = self.nodes['a'].FindCycles()
self.assertTrue(
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
self.assertEquals([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
self.nodes['e'],
self.nodes['a']]],
self.nodes['a'].FindCycles())
if __name__ == '__main__':
unittest.main()
| cc0-1.0 | 5,005,541,937,834,412,000 | 34.633333 | 76 | 0.606174 | false |
nirmeshk/oh-mainline | vendor/packages/gdata/src/gdata/tlslite/TLSConnection.py | 278 | 70347 | """
MAIN CLASS FOR TLS LITE (START HERE!).
"""
from __future__ import generators
import socket
from utils.compat import formatExceptionTrace
from TLSRecordLayer import TLSRecordLayer
from Session import Session
from constants import *
from utils.cryptomath import getRandomBytes
from errors import *
from messages import *
from mathtls import *
from HandshakeSettings import HandshakeSettings
class TLSConnection(TLSRecordLayer):
"""
This class wraps a socket and provides TLS handshaking and data
transfer.
To use this class, create a new instance, passing a connected
socket into the constructor. Then call some handshake function.
If the handshake completes without raising an exception, then a TLS
connection has been negotiated. You can transfer data over this
connection as if it were a socket.
This class provides both synchronous and asynchronous versions of
its key functions. The synchronous versions should be used when
writing single-or multi-threaded code using blocking sockets. The
asynchronous versions should be used when performing asynchronous,
event-based I/O with non-blocking sockets.
Asynchronous I/O is a complicated subject; typically, you should
not use the asynchronous functions directly, but should use some
framework like asyncore or Twisted which TLS Lite integrates with
(see
L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
"""
def __init__(self, sock):
"""Create a new TLSConnection instance.
@param sock: The socket data will be transmitted on. The
socket should already be connected. It may be in blocking or
non-blocking mode.
@type sock: L{socket.socket}
"""
TLSRecordLayer.__init__(self, sock)
def handshakeClientSRP(self, username, password, session=None,
settings=None, checker=None, async=False):
"""Perform an SRP handshake in the role of client.
This function performs a TLS/SRP handshake. SRP mutually
authenticates both parties to each other using only a
username and password. This function may also perform a
combined SRP and server-certificate handshake, if the server
chooses to authenticate itself with a certificate chain in
addition to doing SRP.
TLS/SRP is non-standard. Most TLS implementations don't
support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} or
U{http://trevp.net/tlssrp/} for the latest information on
TLS/SRP.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The SRP username.
@type password: str
@param password: The SRP password.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. This
session must be an SRP session performed with the same username
and password as were passed in. If the resumption does not
succeed, a full SRP handshake will be performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(srpParams=(username, password),
session=session, settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
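# A minimal client-side sketch (placeholders throughout), assuming a
# reachable SRP-enabled server; handshakeClientCert below covers the
# certificate-based variant:
#
#   sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   sock.connect(('server.example.com', 443))
#   connection = TLSConnection(sock)
#   connection.handshakeClientSRP('alice', 'mypassword')
#   connection.write('hello\n')
#   connection.close()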
def handshakeClientCert(self, certChain=None, privateKey=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a certificate-based handshake in the role of client.
This function performs an SSL or TLS handshake. The server
will authenticate itself using an X.509 or cryptoID certificate
chain. If the handshake succeeds, the server's certificate
chain will be stored in the session's serverCertChain attribute.
Unless a checker object is passed in, this function does no
validation or checking of the server's certificate chain.
If the server requests client authentication, the
client will send the passed-in certificate chain, and use the
passed-in private key to authenticate itself. If no
certificate chain and private key were passed in, the client
will attempt to proceed without client authentication. The
server may or may not allow this.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
server requests client authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the server
requests client authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(certParams=(certChain,
privateKey), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a to-be-determined type of handshake in the role of client.
This function performs an SSL or TLS handshake. If the server
requests client certificate authentication, the
certCallback will be invoked and should return a (certChain,
privateKey) pair. If the callback returns None, the library
will attempt to proceed without client authentication. The
server may or may not allow this.
If the server requests SRP authentication, the srpCallback
will be invoked and should return a (username, password) pair.
If the callback returns None, the local implementation will
signal a user_canceled error alert.
After the handshake completes, the client can inspect the
connection's session attribute to determine what type of
authentication was performed.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type srpCallback: callable
@param srpCallback: The callback to be used if the server
requests SRP authentication. If None, the client will not
offer support for SRP ciphersuites.
@type certCallback: callable
@param certCallback: The callback to be used if the server
requests client certificate authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
certCallback), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def handshakeClientSharedKey(self, username, sharedKey, settings=None,
checker=None, async=False):
"""Perform a shared-key handshake in the role of client.
This function performs a shared-key handshake. Using shared
symmetric keys of high entropy (128 bits or greater) mutually
authenticates both parties to each other.
TLS with shared-keys is non-standard. Most TLS
implementations don't support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} for the
latest information on TLS with shared-keys. If the shared-keys
Internet-Draft changes or is superseded, TLS Lite will track
those changes, so the shared-key support in later versions of
TLS Lite may become incompatible with this version.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The shared-key username.
@type sharedKey: str
@param sharedKey: The shared key.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
sharedKey), settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def _handshakeClientAsync(self, srpParams=(), certParams=(),
unknownParams=(), sharedKeyParams=(),
session=None, settings=None, checker=None,
recursive=False):
handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
certParams=certParams, unknownParams=unknownParams,
sharedKeyParams=sharedKeyParams, session=session,
settings=settings, recursive=recursive)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
sharedKeyParams, session, settings, recursive):
if not recursive:
self._handshakeStart(client=True)
#Unpack parameters
srpUsername = None # srpParams
password = None # srpParams
clientCertChain = None # certParams
privateKey = None # certParams
srpCallback = None # unknownParams
certCallback = None # unknownParams
#session # sharedKeyParams (or session)
#settings # settings
if srpParams:
srpUsername, password = srpParams
elif certParams:
clientCertChain, privateKey = certParams
elif unknownParams:
srpCallback, certCallback = unknownParams
elif sharedKeyParams:
session = Session()._createSharedKey(*sharedKeyParams)
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Validate parameters
if srpUsername and not password:
raise ValueError("Caller passed a username but no password")
if password and not srpUsername:
raise ValueError("Caller passed a password but no username")
if clientCertChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not clientCertChain:
raise ValueError("Caller passed a privateKey but no certChain")
if clientCertChain:
foundType = False
try:
import cryptoIDlib.CertChain
if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
if "cryptoID" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't "\
"match Handshake Settings")
settings.certificateTypes = ["cryptoID"]
foundType = True
except ImportError:
pass
if not foundType and isinstance(clientCertChain,
X509CertChain):
if "x509" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't match "\
"Handshake Settings")
settings.certificateTypes = ["x509"]
foundType = True
if not foundType:
raise ValueError("Unrecognized certificate type")
if session:
if not session.valid():
session = None #ignore non-resumable sessions...
elif session.resumable and \
(session.srpUsername != srpUsername):
raise ValueError("Session username doesn't match")
#Add Faults to parameters
if srpUsername and self.fault == Fault.badUsername:
srpUsername += "GARBAGE"
if password and self.fault == Fault.badPassword:
password += "GARBAGE"
if sharedKeyParams:
identifier = sharedKeyParams[0]
sharedKey = sharedKeyParams[1]
if self.fault == Fault.badIdentifier:
identifier += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
elif self.fault == Fault.badSharedKey:
sharedKey += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
#Initialize locals
serverCertChain = None
cipherSuite = 0
certificateType = CertificateType.x509
premasterSecret = None
#Get client nonce
clientRandom = getRandomBytes(32)
#Initialize acceptable ciphersuites
cipherSuites = []
if srpParams:
cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
elif certParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif unknownParams:
if srpCallback:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += \
CipherSuite.getSrpSuites(settings.cipherNames)
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif sharedKeyParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
else:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate types
certificateTypes = settings._getCertificateTypes()
#Tentatively set the version to the client's minimum version.
#We'll use this for the ClientHello, and if an error occurs
#parsing the Server Hello, we'll use this version for the response
self.version = settings.maxVersion
#Either send ClientHello (with a resumable session)...
if session:
#If it's a resumable (i.e. not a shared-key session), then its
#ciphersuite must be one of the acceptable ciphersuites
if (not sharedKeyParams) and \
session.cipherSuite not in cipherSuites:
raise ValueError("Session's cipher suite not consistent "\
"with parameters")
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
session.sessionID, cipherSuites,
certificateTypes, session.srpUsername)
#Or send ClientHello (without)
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
createByteArraySequence([]), cipherSuites,
certificateTypes, srpUsername)
for result in self._sendMsg(clientHello):
yield result
#Get ServerHello (or missing_srp_username)
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.server_hello):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, ServerHello):
serverHello = msg
elif isinstance(msg, Alert):
alert = msg
#If it's not a missing_srp_username, re-raise
if alert.description != AlertDescription.missing_srp_username:
self._shutdown(False)
raise TLSRemoteAlert(alert)
#If we're not in SRP callback mode, we won't have offered SRP
#without a username, so we shouldn't get this alert
if not srpCallback:
for result in self._sendError(\
AlertDescription.unexpected_message):
yield result
srpParams = srpCallback()
#If the callback returns None, cancel the handshake
if srpParams == None:
for result in self._sendError(AlertDescription.user_canceled):
yield result
#Recursively perform handshake
for result in self._handshakeClientAsyncHelper(srpParams,
None, None, None, None, settings, True):
yield result
return
#Get the server version. Do this before anything else, so any
#error alerts will use the server's version
self.version = serverHello.server_version
#Future responses from server must use this version
self._versionCheck = True
#Check ServerHello
if serverHello.server_version < settings.minVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(serverHello.server_version)):
yield result
if serverHello.server_version > settings.maxVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too new version: %s" % str(serverHello.server_version)):
yield result
if serverHello.cipher_suite not in cipherSuites:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect ciphersuite"):
yield result
if serverHello.certificate_type not in certificateTypes:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect certificate type"):
yield result
if serverHello.compression_method != 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect compression method"):
yield result
#Get the server nonce
serverRandom = serverHello.random
#If the server agrees to resume
if session and session.sessionID and \
serverHello.session_id == session.sessionID:
#If a shared-key, we're flexible about suites; otherwise the
#server-chosen suite has to match the session's suite
if sharedKeyParams:
session.cipherSuite = serverHello.cipher_suite
elif serverHello.cipher_suite != session.cipherSuite:
for result in self._sendError(\
AlertDescription.illegal_parameter,\
"Server's ciphersuite doesn't match session"):
yield result
#Set the session for this connection
self.session = session
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
for result in self._sendFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
#If server DOES NOT agree to resume
else:
if sharedKeyParams:
for result in self._sendError(\
AlertDescription.user_canceled,
"Was expecting a shared-key resumption"):
yield result
#We've already validated these
cipherSuite = serverHello.cipher_suite
certificateType = serverHello.certificate_type
#If the server chose an SRP suite...
if cipherSuite in CipherSuite.srpSuites:
#Get ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an SRP+RSA suite...
elif cipherSuite in CipherSuite.srpRsaSuites:
#Get Certificate, ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an RSA suite...
elif cipherSuite in CipherSuite.rsaSuites:
#Get Certificate[, CertificateRequest], ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
(HandshakeType.server_hello_done,
HandshakeType.certificate_request)):
if result in (0,1):
yield result
else:
break
msg = result
certificateRequest = None
if isinstance(msg, CertificateRequest):
certificateRequest = msg
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
elif isinstance(msg, ServerHelloDone):
serverHelloDone = msg
else:
raise AssertionError()
#Calculate SRP premaster secret, if server chose an SRP or
#SRP+RSA suite
if cipherSuite in CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites:
#Get and check the server's group parameters and B value
N = serverKeyExchange.srp_N
g = serverKeyExchange.srp_g
s = serverKeyExchange.srp_s
B = serverKeyExchange.srp_B
if (g,N) not in goodGroupParameters:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"Unknown group parameters"):
yield result
if numBits(N) < settings.minKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too small: %d" % numBits(N)):
yield result
if numBits(N) > settings.maxKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too large: %d" % numBits(N)):
yield result
if B % N == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Suspicious B value"):
yield result
#Check the server's signature, if server chose an
#SRP+RSA suite
if cipherSuite in CipherSuite.srpRsaSuites:
#Hash ServerKeyExchange/ServerSRPParams
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
#Extract signature bytes from ServerKeyExchange
sigBytes = serverKeyExchange.signature
if len(sigBytes) == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server sent an SRP ServerKeyExchange "\
"message without a signature"):
yield result
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Verify signature
if not publicKey.verify(sigBytes, hashBytes):
for result in self._sendError(\
AlertDescription.decrypt_error,
"Signature failed to verify"):
yield result
#Calculate client's ephemeral DH values (a, A)
a = bytesToNumber(getRandomBytes(32))
A = powMod(g, a, N)
#Calculate client's static DH values (x, v)
x = makeX(bytesToString(s), srpUsername, password)
v = powMod(g, x, N)
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
k = makeK(N, g)
S = powMod((B - (k*v)) % N, a+(u*x), N)
if self.fault == Fault.badA:
A = N
S = 0
premasterSecret = numberToBytes(S)
#Send ClientKeyExchange
for result in self._sendMsg(\
ClientKeyExchange(cipherSuite).createSRP(A)):
yield result
#Calculate RSA premaster secret, if server chose an RSA suite
elif cipherSuite in CipherSuite.rsaSuites:
#Handle the presence of a CertificateRequest
if certificateRequest:
if unknownParams and certCallback:
certParamsNew = certCallback()
if certParamsNew:
clientCertChain, privateKey = certParamsNew
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Calculate premaster secret
premasterSecret = getRandomBytes(48)
premasterSecret[0] = settings.maxVersion[0]
premasterSecret[1] = settings.maxVersion[1]
if self.fault == Fault.badPremasterPadding:
premasterSecret[0] = 5
if self.fault == Fault.shortPremasterSecret:
premasterSecret = premasterSecret[:-1]
#Encrypt premaster secret to server's public key
encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
#If client authentication was requested, send Certificate
#message, either with certificates or empty
if certificateRequest:
clientCertificate = Certificate(certificateType)
if clientCertChain:
#Check to make sure we have the same type of
#certificates the server requested
wrongType = False
if certificateType == CertificateType.x509:
if not isinstance(clientCertChain, X509CertChain):
wrongType = True
elif certificateType == CertificateType.cryptoID:
if not isinstance(clientCertChain,
cryptoIDlib.CertChain.CertChain):
wrongType = True
if wrongType:
for result in self._sendError(\
AlertDescription.handshake_failure,
"Client certificate is of wrong type"):
yield result
clientCertificate.create(clientCertChain)
for result in self._sendMsg(clientCertificate):
yield result
else:
#The server didn't request client auth, so we
#zeroize these so the clientCertChain won't be
#stored in the session.
privateKey = None
clientCertChain = None
#Send ClientKeyExchange
clientKeyExchange = ClientKeyExchange(cipherSuite,
self.version)
clientKeyExchange.createRSA(encryptedPreMasterSecret)
for result in self._sendMsg(clientKeyExchange):
yield result
#If client authentication was requested and we have a
#private key, send CertificateVerify
if certificateRequest and privateKey:
if self.version == (3,0):
#Create a temporary session object, just for the
#purpose of creating the CertificateVerify
session = Session()
session._calcMasterSecret(self.version,
premasterSecret,
clientRandom,
serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(\
self._handshake_md5.digest() + \
self._handshake_sha.digest())
if self.fault == Fault.badVerifyMessage:
verifyBytes[0] = ((verifyBytes[0]+1) % 256)
signedBytes = privateKey.sign(verifyBytes)
certificateVerify = CertificateVerify()
certificateVerify.create(signedBytes)
for result in self._sendMsg(certificateVerify):
yield result
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = serverHello.session_id
self.session.cipherSuite = cipherSuite
self.session.srpUsername = srpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Perform a handshake in the role of server.
This function performs an SSL or TLS handshake. Depending on
the arguments and the behavior of the client, this function can
perform a shared-key, SRP, or certificate-based handshake. It
can also perform a combined SRP and server-certificate
handshake.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
This function does not send a Hello Request message before
performing the handshake, so if re-handshaking is required,
the server must signal the client to begin the re-handshake
through some other means.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
@param sharedKeyDB: A database of shared symmetric keys
associated with usernames. If the client performs a
shared-key handshake, the session's sharedKeyUsername
attribute will be set.
@type verifierDB: L{tlslite.VerifierDB.VerifierDB}
@param verifierDB: A database of SRP password verifiers
associated with usernames. If the client performs an SRP
handshake, the session's srpUsername attribute will be set.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
client requests server certificate authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the client
requests server certificate authentication.
@type reqCert: bool
@param reqCert: Whether to request client certificate
authentication. This only applies if the client chooses server
certificate authentication; if the client chooses SRP or
shared-key authentication, this will be ignored. If the client
performs client certificate authentication, the session's
clientCertChain attribute will be set.
@type sessionCache: L{tlslite.SessionCache.SessionCache}
@param sessionCache: An in-memory cache of resumable sessions.
The client can resume sessions from this cache. Alternatively,
if the client performs a full handshake, a new session will be
added to the cache.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites and SSL/TLS version chosen by the server.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes successfully.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache, settings,
checker):
pass
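# Editor's illustrative sketch (not part of the original tlslite source): a
# minimal blocking accept loop built on handshakeServer(). The names
# `listener`, `x509Chain` and `rsaKey` are hypothetical placeholders for a
# listening socket and credentials loaded elsewhere (for example with
# tlslite's X509CertChain / parsePEMKey helpers).
#
#     while True:
#         newsock, address = listener.accept()
#         connection = TLSConnection(newsock)
#         try:
#             connection.handshakeServer(certChain=x509Chain,
#                                        privateKey=rsaKey,
#                                        reqCert=False)
#         except TLSError:
#             connection.sock.close()
#             continue
#         # ...use connection like an ordinary socket, then...
#         connection.close()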
def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Start a server handshake operation on the TLS connection.
This function returns a generator which behaves similarly to
handshakeServer(). Successive invocations of the generator
will return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or it will raise StopIteration
if the handshake operation is complete.
@rtype: iterable
@return: A generator; see above for details.
"""
handshaker = self._handshakeServerAsyncHelper(\
sharedKeyDB=sharedKeyDB,
verifierDB=verifierDB, certChain=certChain,
privateKey=privateKey, reqCert=reqCert,
sessionCache=sessionCache, settings=settings)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
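# Editor's illustrative sketch (not original tlslite code): driving the
# asynchronous generator from a non-blocking event loop. `waitUntilReadable`
# and `waitUntilWritable` are hypothetical helpers of the surrounding loop;
# the underlying socket is assumed to be in non-blocking mode.
#
#     handshaker = connection.handshakeServerAsync(certChain=x509Chain,
#                                                  privateKey=rsaKey)
#     for status in handshaker:
#         if status == 0:
#             waitUntilReadable(connection.sock)
#         elif status == 1:
#             waitUntilWritable(connection.sock)
#     # iteration ends (StopIteration is consumed by the for loop) once the
#     # handshake has completed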
def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache,
settings):
self._handshakeStart(client=False)
if (not sharedKeyDB) and (not verifierDB) and (not certChain):
raise ValueError("Caller passed no authentication credentials")
if certChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not certChain:
raise ValueError("Caller passed a privateKey but no certChain")
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Initialize acceptable cipher suites
cipherSuites = []
if verifierDB:
if certChain:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
if sharedKeyDB or certChain:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate type
certificateType = None
if certChain:
try:
import cryptoIDlib.CertChain
if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
certificateType = CertificateType.cryptoID
except ImportError:
pass
if isinstance(certChain, X509CertChain):
certificateType = CertificateType.x509
if certificateType == None:
raise ValueError("Unrecognized certificate type")
#Initialize locals
clientCertChain = None
serverCertChain = None #We may set certChain to this later
postFinishedError = None
#Tentatively set version to most-desirable version, so if an error
#occurs parsing the ClientHello, this is what we'll use for the
#error alert
self.version = settings.maxVersion
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#If client's version is too low, reject it
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#Calculate the first cipher suite intersection.
#This is the 'privileged' ciphersuite. We'll use it if we're
#doing a shared-key resumption or a new negotiation. In fact,
#the only time we won't use it is if we're resuming a non-sharedkey
#session, in which case we use the ciphersuite from the session.
#
#Given the current ciphersuite ordering, this means we prefer SRP
#over non-SRP.
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If resumption was requested...
if clientHello.session_id and (sharedKeyDB or sessionCache):
session = None
#Check in the sharedKeys container
if sharedKeyDB and len(clientHello.session_id)==16:
try:
#Trim off zero padding, if any
for x in range(16):
if clientHello.session_id[x]==0:
break
self.allegedSharedKeyUsername = bytesToString(\
clientHello.session_id[:x])
session = sharedKeyDB[self.allegedSharedKeyUsername]
if not session.sharedKey:
raise AssertionError()
#use privileged ciphersuite
session.cipherSuite = cipherSuite
except KeyError:
pass
#Then check in the session cache
if sessionCache and not session:
try:
session = sessionCache[bytesToString(\
clientHello.session_id)]
if session.sharedKey:
raise AssertionError()
if not session.resumable:
raise AssertionError()
#Check for consistency with ClientHello
if session.cipherSuite not in cipherSuites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if session.cipherSuite not in clientHello.cipher_suites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if clientHello.srp_username:
if clientHello.srp_username != session.srpUsername:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
except KeyError:
pass
#If a session is found..
if session:
#Set the session
self.session = session
#Send ServerHello
serverHello = ServerHello()
serverHello.create(self.version, serverRandom,
session.sessionID, session.cipherSuite,
certificateType)
for result in self._sendMsg(serverHello):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
return
#If not a resumption...
#TRICKY: we might have chosen an RSA suite that was only deemed
#acceptable because of the shared-key resumption. If the shared-
#key resumption failed, because the identifier wasn't recognized,
#we might fall through to here, where we have an RSA suite
#chosen, but no certificate.
if cipherSuite in CipherSuite.rsaSuites and not certChain:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If an RSA suite is chosen, check for certificate type intersection
#(We do this check down here because if the mismatch occurs but the
# client is using a shared-key session, it's okay)
if cipherSuite in CipherSuite.rsaSuites + \
CipherSuite.srpRsaSuites:
if certificateType not in clientHello.certificate_types:
for result in self._sendError(\
AlertDescription.handshake_failure,
"the client doesn't support my certificate type"):
yield result
#Move certChain -> serverCertChain, now that we're using it
serverCertChain = certChain
#Create sessionID
if sessionCache:
sessionID = getRandomBytes(32)
else:
sessionID = createByteArraySequence([])
#If we've selected an SRP suite, exchange keys and calculate
#premaster secret:
if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
#If there's no SRP username...
if not clientHello.srp_username:
#Ask the client to re-send ClientHello with one
for result in self._sendMsg(Alert().create(\
AlertDescription.missing_srp_username,
AlertLevel.warning)):
yield result
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#Check ClientHello
#If client's version is too low, reject it (COPIED CODE; BAD!)
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Recalculate the privileged cipher suite, making sure to
#pick an SRP suite
cipherSuites = [c for c in cipherSuites if c in \
CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites]
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#The username better be there, this time
if not clientHello.srp_username:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Client resent a hello, but without the SRP"\
" username"):
yield result
#Get username
self.allegedSrpUsername = clientHello.srp_username
#Get parameters from username
try:
entry = verifierDB[self.allegedSrpUsername]
except KeyError:
for result in self._sendError(\
AlertDescription.unknown_srp_username):
yield result
(N, g, s, v) = entry
#Calculate server's ephemeral DH values (b, B)
b = bytesToNumber(getRandomBytes(32))
k = makeK(N, g)
B = (powMod(g, b, N) + (k*v)) % N
#Create ServerKeyExchange, signing it if necessary
serverKeyExchange = ServerKeyExchange(cipherSuite)
serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
if cipherSuite in CipherSuite.srpRsaSuites:
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
serverKeyExchange.signature = privateKey.sign(hashBytes)
#Send ServerHello[, Certificate], ServerKeyExchange,
#ServerHelloDone
msgs = []
serverHello = ServerHello()
serverHello.create(self.version, serverRandom, sessionID,
cipherSuite, certificateType)
msgs.append(serverHello)
if cipherSuite in CipherSuite.srpRsaSuites:
certificateMsg = Certificate(certificateType)
certificateMsg.create(serverCertChain)
msgs.append(certificateMsg)
msgs.append(serverKeyExchange)
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get and check ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
A = clientKeyExchange.srp_A
if A % N == 0:
postFinishedError = (AlertDescription.illegal_parameter,
"Suspicious A value")
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
S = powMod((A * powMod(v,u,N)) % N, b, N)
premasterSecret = numberToBytes(S)
#If we've selected an RSA suite, exchange keys and calculate
#premaster secret:
elif cipherSuite in CipherSuite.rsaSuites:
#Send ServerHello, Certificate[, CertificateRequest],
#ServerHelloDone
msgs = []
msgs.append(ServerHello().create(self.version, serverRandom,
sessionID, cipherSuite, certificateType))
msgs.append(Certificate(certificateType).create(serverCertChain))
if reqCert:
msgs.append(CertificateRequest())
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get [Certificate,] (if was requested)
if reqCert:
if self.version == (3,0):
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, Alert):
#If it's not a no_certificate alert, re-raise
alert = msg
if alert.description != \
AlertDescription.no_certificate:
self._shutdown(False)
raise TLSRemoteAlert(alert)
elif isinstance(msg, Certificate):
clientCertificate = msg
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
elif self.version in ((3,1), (3,2)):
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
clientCertificate = result
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
#Get ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
#Decrypt ClientKeyExchange
premasterSecret = privateKey.decrypt(\
clientKeyExchange.encryptedPreMasterSecret)
randomPreMasterSecret = getRandomBytes(48)
versionCheck = (premasterSecret[0], premasterSecret[1])
if not premasterSecret:
premasterSecret = randomPreMasterSecret
elif len(premasterSecret)!=48:
premasterSecret = randomPreMasterSecret
elif versionCheck != clientHello.client_version:
if versionCheck != self.version: #Tolerate buggy IE clients
premasterSecret = randomPreMasterSecret
#Get and check CertificateVerify, if relevant
if clientCertChain:
if self.version == (3,0):
#Create a temporary session object, just for the purpose
#of checking the CertificateVerify
session = Session()
session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(self._handshake_md5.digest() +\
self._handshake_sha.digest())
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate_verify):
if result in (0,1):
yield result
else:
break
certificateVerify = result
publicKey = clientCertChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too small: %d" % len(publicKey))
if len(publicKey) > settings.maxKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too large: %d" % len(publicKey))
if not publicKey.verify(certificateVerify.signature,
verifyBytes):
postFinishedError = (AlertDescription.decrypt_error,
"Signature failed to verify")
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = sessionID
self.session.cipherSuite = cipherSuite
self.session.srpUsername = self.allegedSrpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
#If we were holding a post-finished error until receiving the client
#finished message, send it now. We delay the call until this point
#because calling sendError() throws an exception, and our caller might
#shut down the socket upon receiving the exception. If he did, and the
#client was still sending its ChangeCipherSpec or Finished messages, it
#would cause a socket error on the client side. This is a lot of
#consideration to show to misbehaving clients, but this would also
#cause problems with fault-testing.
if postFinishedError:
for result in self._sendError(*postFinishedError):
yield result
for result in self._sendFinished():
yield result
#Add the session object to the session cache
if sessionCache and sessionID:
sessionCache[bytesToString(sessionID)] = self.session
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def _handshakeWrapperAsync(self, handshaker, checker):
if not self.fault:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except:
self._shutdown(False)
raise
else:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except socket.error, e:
raise TLSFaultError("socket error!")
except TLSAbruptCloseError, e:
raise TLSFaultError("abrupt close error!")
except TLSAlert, alert:
if alert.description not in Fault.faultAlerts[self.fault]:
raise TLSFaultError(str(alert))
else:
pass
except:
self._shutdown(False)
raise
else:
raise TLSFaultError("No error!")
def _getKeyFromChain(self, certificate, settings):
#Get and check cert chain from the Certificate message
certChain = certificate.certChain
if not certChain or certChain.getNumCerts() == 0:
for result in self._sendError(AlertDescription.illegal_parameter,
"Other party sent a Certificate message without "\
"certificates"):
yield result
#Get and check public key from the cert chain
publicKey = certChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too small: %d" % len(publicKey)):
yield result
if len(publicKey) > settings.maxKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too large: %d" % len(publicKey)):
yield result
yield publicKey, certChain
| agpl-3.0 | 3,248,366,208,112,861,000 | 42.966875 | 83 | 0.571936 | false |
azumimuo/family-xbmc-addon | plugin.video.exodus/resources/lib/sources/tunemovie.py | 7 | 6553 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['tunemovies.to', 'tunemovie.tv']
self.base_link = 'https://tunemovies.to'
self.search_link = '/search/%s.html'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(title)
t = cleantitle.get(title)
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and year == i[2]][0]
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(data['tvshowtitle'])
t = cleantitle.get(data['tvshowtitle'])
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and ('Season %s' % season) in i[1]][0]
url += '?episode=%01d' % int(episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
try:
url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
except:
episode = None
ref = url
for i in range(3):
result = client.request(url)
if not result == None: break
if not episode == None:
result = client.parseDOM(result, 'div', attrs = {'id': 'ip_episode'})[0]
ep_url = client.parseDOM(result, 'a', attrs = {'data-name': str(episode)}, ret='href')[0]
for i in range(3):
result = client.request(ep_url)
if not result == None: break
r = client.parseDOM(result, 'div', attrs = {'class': '[^"]*server_line[^"]*'})
for u in r:
try:
url = urlparse.urljoin(self.base_link, '/ip.file/swf/plugins/ipplugins.php')
p1 = client.parseDOM(u, 'a', ret='data-film')[0]
p2 = client.parseDOM(u, 'a', ret='data-server')[0]
p3 = client.parseDOM(u, 'a', ret='data-name')[0]
post = {'ipplugins': 1, 'ip_film': p1, 'ip_server': p2, 'ip_name': p3}
post = urllib.urlencode(post)
for i in range(3):
result = client.request(url, post=post, XHR=True, referer=ref, timeout='10')
if not result == None: break
result = json.loads(result)
u = result['s']
s = result['v']
url = urlparse.urljoin(self.base_link, '/ip.file/swf/ipplayer/ipplayer.php')
for n in range(3):
try:
post = {'u': u, 'w': '100%', 'h': '420', 's': s, 'n': n}
post = urllib.urlencode(post)
result = client.request(url, post=post, XHR=True, referer=ref)
src = json.loads(result)['data']
if type(src) is list:
src = [i['files'] for i in src]
for i in src:
try:
sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
except:
pass
else:
src = client.request(src)
src = client.parseDOM(src, 'source', ret='src', attrs = {'type': 'video.+?'})[0]
src += '|%s' % urllib.urlencode({'User-agent': client.randomagent()})
sources.append({'source': 'cdn', 'quality': 'HD', 'language': 'en', 'url': src, 'direct': False, 'debridonly': False})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return directstream.googlepass(url)
| gpl-2.0 | -7,133,088,544,431,839,000 | 38.239521 | 193 | 0.491988 | false |
rwatson/chromium-capsicum | third_party/scons/scons-local/SCons/Tool/lex.py | 3 | 3285 | """SCons.Tool.lex
Tool-specific initialization for lex.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/lex.py 3897 2009/01/13 06:45:54 scons"
import os.path
import string
import SCons.Action
import SCons.Tool
import SCons.Util
LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR")
def lexEmitter(target, source, env):
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
if sourceExt == ".lm": # If using Objective-C
target = [sourceBase + ".m"] # the extension is ".m".
# This emitter essentially tries to add to the target all extra
# files generated by flex.
# Different options that are used to trigger the creation of extra files.
fileGenOptions = ["--header-file=", "--tables-file="]
lexflags = env.subst("$LEXFLAGS", target=target, source=source)
for option in SCons.Util.CLVar(lexflags):
for fileGenOption in fileGenOptions:
l = len(fileGenOption)
if option[:l] == fileGenOption:
# A file generating option is present, so add the
# file name to the target list.
fileName = string.strip(option[l:])
target.append(fileName)
return (target, source)
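# Editor's note -- illustrative example, not part of the original tool module.
# In a hypothetical SConstruct, the emitter above makes SCons track the header
# that flex generates as an additional target:
#
#     env = Environment(tools=['default', 'lex'])
#     env.CFile(target='scanner.c', source='scanner.l',
#               LEXFLAGS='--header-file=scanner.h')
#
# Because '--header-file=' appears in fileGenOptions, lexEmitter appends
# 'scanner.h' to the target list, so the generated header is treated as a
# build product alongside scanner.c.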
def generate(env):
"""Add Builders and construction variables for lex to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action(".l", LexAction)
c_file.add_emitter(".l", lexEmitter)
c_file.add_action(".lex", LexAction)
c_file.add_emitter(".lex", lexEmitter)
# Objective-C
cxx_file.add_action(".lm", LexAction)
cxx_file.add_emitter(".lm", lexEmitter)
# C++
cxx_file.add_action(".ll", LexAction)
cxx_file.add_emitter(".ll", lexEmitter)
env["LEX"] = env.Detect("flex") or "lex"
env["LEXFLAGS"] = SCons.Util.CLVar("")
env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"
def exists(env):
return env.Detect(["flex", "lex"])
| bsd-3-clause | -7,175,742,991,458,954,000 | 34.322581 | 89 | 0.69102 | false |
czchen/debian-pgcli | pgcli/packages/expanded.py | 2 | 1174 | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
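# Editor's note -- minimal usage sketch, not part of the original module. The
# relative import at the top means this file is meant to be imported from the
# pgcli package rather than run directly, so the example is shown as comments:
#
#     headers = ['id', 'name']
#     rows = [(1, 'alice'), (2, 'bob')]
#     print(expanded_table(rows, headers))
#
# which renders roughly one block per record, in the style of psql's expanded
# output:
#
#     -[ RECORD 0 ]
#     id   | 1
#     name | alice
#     -[ RECORD 1 ]
#     id   | 2
#     name | bob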
| bsd-3-clause | -3,052,752,594,920,172,500 | 26.302326 | 66 | 0.560477 | false |
spadae22/odoo | addons/procurement/__openerp__.py | 267 | 2661 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Procurements',
'version' : '1.0',
'author' : 'OpenERP SA',
'website': 'https://www.odoo.com/page/manufacturing',
'category' : 'Hidden/Dependency',
'depends' : ['base', 'product'],
'description': """
This is the module for computing Procurements.
==============================================
This procurement module only depends on the product module and is not useful
by itself. Procurements represent needs that have to be solved by a procurement
rule. When a procurement is created, it is confirmed. When a rule is found,
it is put in the running state. Afterwards, the procurement checks whether what
the rule required has been executed, and then moves to the done state. A procurement
can also go into exception, for example when no rule can be found, and it can be cancelled.
The mechanism is extended by several modules. The procurement rule of stock
creates a move, and the procurement is fulfilled when the move is done.
The procurement rule of sale_service creates a task. Those of purchase or
mrp create a purchase order or a manufacturing order.
The scheduler checks whether it can assign a rule to confirmed procurements and
whether it can move running procurements to done.
Procurements in exception should be checked manually and can be re-run.
""",
'data': [
'security/ir.model.access.csv',
'security/procurement_security.xml',
'procurement_data.xml',
'wizard/schedulers_all_view.xml',
'procurement_view.xml',
'company_view.xml',
],
'demo': [],
'test': ['test/procurement.yml'],
'installable': True,
'auto_install': True,
}
| agpl-3.0 | 7,738,974,027,935,920,000 | 41.238095 | 92 | 0.650507 | false |
LuqueDaniel/ninja-ide | ninja_ide/gui/editor/checkers/__init__.py | 7 | 1735 | # -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
NOTIFICATIONS_CHECKERS = {}
def register_checker(lang='python', checker=None, color=None, priority=1):
"""Register a Checker (Like PEP8, Lint, etc) for some language.
@lang: language that the checker apply.
@checker: Class to be instantiated.
@color: the color that this checker will use.
@priority: the priority of this checker (1=LOW, >1 = HIGH...)"""
global NOTIFICATIONS_CHECKERS
checkers = NOTIFICATIONS_CHECKERS.get(lang, [])
checkers.append((checker, color, priority))
NOTIFICATIONS_CHECKERS[lang] = checkers
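# Editor's illustrative sketch (not part of the original module): how a plugin
# could register and later remove a checker. `MyPep8Checker` is a hypothetical
# checker class; the color is just an example value.
#
#     from ninja_ide.gui.editor.checkers import (register_checker,
#                                                remove_checker)
#
#     register_checker(lang='python', checker=MyPep8Checker,
#                      color='#ffcc00', priority=2)
#     ...
#     # remove_checker() expects the same (checker, color, priority) tuple
#     # that register_checker() stored:
#     remove_checker((MyPep8Checker, '#ffcc00', 2))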
def remove_checker(checker):
global NOTIFICATIONS_CHECKERS
checkers = NOTIFICATIONS_CHECKERS.get('python', [])
if checker in checkers:
checkers.remove(checker)
NOTIFICATIONS_CHECKERS['python'] = checkers
def get_checkers_for(lang='python'):
"""Get a registered checker for some language."""
global NOTIFICATIONS_CHECKERS
return NOTIFICATIONS_CHECKERS.get(lang, []) | gpl-3.0 | -8,535,606,142,940,938,000 | 35.93617 | 74 | 0.721037 | false |
primoz-k/cookiecutter-django | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/contrib/sites/migrations/0001_initial.py | 348 | 1025 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.sites.models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100, validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
| bsd-3-clause | -4,517,479,185,735,276,500 | 32.064516 | 161 | 0.545366 | false |
slightlymadphoenix/activityPointsApp | activitypoints/lib/python3.5/site-packages/django/db/backends/mysql/base.py | 44 | 16091 | """
MySQL database backend for Django.
Requires mysqlclient: https://pypi.python.org/pypi/mysqlclient/
MySQLdb is supported for Python 2 only: http://sourceforge.net/projects/mysql-python
"""
from __future__ import unicode_literals
import datetime
import re
import sys
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeBytes, SafeText
try:
import MySQLdb as Database
except ImportError as e:
raise ImproperlyConfigured(
'Error loading MySQLdb module: %s.\n'
'Did you install mysqlclient or MySQL-python?' % e
)
from MySQLdb.constants import CLIENT, FIELD_TYPE # isort:skip
from MySQLdb.converters import Thing2Literal, conversions # isort:skip
# Some of these import MySQLdb, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
from .validation import DatabaseValidation # isort:skip
version = Database.version_info
if version < (1, 2, 3):
raise ImproperlyConfigured(
"MySQLdb/mysqlclient 1.2.3 or newer is required; you have %s"
% Database.__version__
)
def adapt_datetime_warn_on_aware_datetime(value, conv):
# Remove this function and rely on the default adapter in Django 2.0.
if settings.USE_TZ and timezone.is_aware(value):
warnings.warn(
"The MySQL database adapter received an aware datetime (%s), "
"probably from cursor.execute(). Update your code to pass a "
"naive datetime in the database connection's time zone (UTC by "
"default).", RemovedInDjango20Warning)
# This doesn't account for the database connection's timezone,
# which isn't known. (That's why this adapter is deprecated.)
value = value.astimezone(timezone.utc).replace(tzinfo=None)
return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S.%f"), conv)
# MySQLdb returns TIME columns as timedelta -- they are more like timedelta in
# terms of actual behavior as they are signed and include days -- and Django
# expects time, so we still need to override that. We also need to add special
# handling for SafeText and SafeBytes as MySQLdb's type checking is too tight
# to catch those (see Django ticket #6052).
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: backend_utils.typecast_time,
FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal,
datetime.datetime: adapt_datetime_warn_on_aware_datetime,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same).
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
class CursorWrapper(object):
"""
A thin wrapper around MySQLdb's normal cursor class so that we can catch
particular exception instances and reraise them with the right types.
Implemented as a wrapper, rather than a subclass, so that we aren't stuck
to the particular underlying representation returned by Connection.cursor().
"""
codes_for_integrityerror = (1048,)
def __init__(self, cursor):
self.cursor = cursor
def execute(self, query, args=None):
try:
# args is None means no string interpolation
return self.cursor.execute(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, args):
try:
return self.cursor.executemany(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Close instead of passing through to avoid backend-specific behavior
# (#17671).
self.close()
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'mysql'
# This dictionary maps Field objects to their associated MySQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
_data_types = {
'AutoField': 'integer AUTO_INCREMENT',
'BigAutoField': 'bigint AUTO_INCREMENT',
'BinaryField': 'longblob',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'bigint',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'GenericIPAddressField': 'char(39)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer UNSIGNED',
'PositiveSmallIntegerField': 'smallint UNSIGNED',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'longtext',
'TimeField': 'time',
'UUIDField': 'char(32)',
}
@cached_property
def data_types(self):
if self.features.supports_microsecond_precision:
return dict(self._data_types, DateTimeField='datetime(6)', TimeField='time(6)')
else:
return self._data_types
operators = {
'exact': '= %s',
'iexact': 'LIKE %s',
'contains': 'LIKE BINARY %s',
'icontains': 'LIKE %s',
'regex': 'REGEXP BINARY %s',
'iregex': 'REGEXP %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE BINARY %s',
'endswith': 'LIKE BINARY %s',
'istartswith': 'LIKE %s',
'iendswith': 'LIKE %s',
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
'contains': "LIKE BINARY CONCAT('%%', {}, '%%')",
'icontains': "LIKE CONCAT('%%', {}, '%%')",
'startswith': "LIKE BINARY CONCAT({}, '%%')",
'istartswith': "LIKE CONCAT({}, '%%')",
'endswith': "LIKE BINARY CONCAT('%%', {})",
'iendswith': "LIKE CONCAT('%%', {})",
}
isolation_levels = {
'read uncommitted',
'read committed',
'repeatable read',
'serializable',
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
validation_class = DatabaseValidation
def get_connection_params(self):
kwargs = {
'conv': django_conversions,
'charset': 'utf8',
}
if six.PY2:
kwargs['use_unicode'] = True
settings_dict = self.settings_dict
if settings_dict['USER']:
kwargs['user'] = settings_dict['USER']
if settings_dict['NAME']:
kwargs['db'] = settings_dict['NAME']
if settings_dict['PASSWORD']:
kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST'].startswith('/'):
kwargs['unix_socket'] = settings_dict['HOST']
elif settings_dict['HOST']:
kwargs['host'] = settings_dict['HOST']
if settings_dict['PORT']:
kwargs['port'] = int(settings_dict['PORT'])
# We need the number of potentially affected rows after an
# "UPDATE", not the number of changed rows.
kwargs['client_flag'] = CLIENT.FOUND_ROWS
# Validate the transaction isolation level, if specified.
options = settings_dict['OPTIONS'].copy()
isolation_level = options.pop('isolation_level', None)
if isolation_level:
isolation_level = isolation_level.lower()
if isolation_level not in self.isolation_levels:
raise ImproperlyConfigured(
"Invalid transaction isolation level '%s' specified.\n"
"Use one of %s, or None." % (
isolation_level,
', '.join("'%s'" % s for s in sorted(self.isolation_levels))
))
# The variable assignment form of setting transaction isolation
# levels will be used, e.g. "set tx_isolation='repeatable-read'".
isolation_level = isolation_level.replace(' ', '-')
self.isolation_level = isolation_level
kwargs.update(options)
return kwargs
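# Editor's note -- illustrative configuration sketch, not part of Django
# itself. The 'isolation_level' option validated above is supplied through
# the project's DATABASES setting (names below are example values):
#
#     DATABASES = {
#         'default': {
#             'ENGINE': 'django.db.backends.mysql',
#             'NAME': 'mydb',
#             'USER': 'myuser',
#             'PASSWORD': 'secret',
#             'OPTIONS': {'isolation_level': 'read committed'},
#         }
#     }
#
# init_connection_state() below then issues
# "SET TX_ISOLATION = 'read-committed'" on each new connection.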
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.encoders[SafeText] = conn.encoders[six.text_type]
conn.encoders[SafeBytes] = conn.encoders[bytes]
return conn
def init_connection_state(self):
assignments = []
if self.features.is_sql_auto_is_null_enabled:
# SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on
# a recently inserted row will return when the field is tested
# for NULL. Disabling this brings this aspect of MySQL in line
# with SQL standards.
assignments.append('SQL_AUTO_IS_NULL = 0')
if self.isolation_level:
assignments.append("TX_ISOLATION = '%s'" % self.isolation_level)
if assignments:
with self.cursor() as cursor:
cursor.execute('SET ' + ', '.join(assignments))
def create_cursor(self, name=None):
cursor = self.connection.cursor()
return CursorWrapper(cursor)
def _rollback(self):
try:
BaseDatabaseWrapper._rollback(self)
except Database.NotSupportedError:
pass
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit(autocommit)
def disable_constraint_checking(self):
"""
Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
to indicate constraint checks need to be re-enabled.
"""
self.cursor().execute('SET foreign_key_checks=0')
return True
def enable_constraint_checking(self):
"""
Re-enable foreign key checks after they have been disabled.
"""
# Override needs_rollback in case constraint_checks_disabled is
# nested inside transaction.atomic.
self.needs_rollback, needs_rollback = False, self.needs_rollback
try:
self.cursor().execute('SET foreign_key_checks=1')
finally:
self.needs_rollback = needs_rollback
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute(
"""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
""" % (
primary_key_column_name, column_name, table_name,
referenced_table_name, column_name, referenced_column_name,
column_name, referenced_column_name,
)
)
for bad_row in cursor.fetchall():
raise utils.IntegrityError(
"The row in table '%s' with primary key '%s' has an invalid "
"foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
% (
table_name, bad_row[0], table_name, column_name,
bad_row[1], referenced_table_name, referenced_column_name,
)
)
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def mysql_version(self):
with self.temporary_connection() as cursor:
cursor.execute('SELECT VERSION()')
server_info = cursor.fetchone()[0]
match = server_version_re.match(server_info)
if not match:
raise Exception('Unable to determine MySQL version from version string %r' % server_info)
return tuple(int(x) for x in match.groups())
| mit | 85,078,328,415,849,150 | 40.153453 | 117 | 0.614381 | false |
resmo/ansible | test/units/modules/network/onyx/test_onyx_ospf.py | 68 | 4494 | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_ospf
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxOspfModule(TestOnyxModule):
module = onyx_ospf
def setUp(self):
super(TestOnyxOspfModule, self).setUp()
self._ospf_exists = True
self.mock_get_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_config")
self.get_config = self.mock_get_config.start()
self.mock_get_interfaces_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_interfaces_config")
self.get_interfaces_config = self.mock_get_interfaces_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxOspfModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
if self._ospf_exists:
config_file = 'onyx_ospf_show.cfg'
self.get_config.return_value = load_fixture(config_file)
config_file = 'onyx_ospf_interfaces_show.cfg'
self.get_interfaces_config.return_value = load_fixture(config_file)
else:
self.get_config.return_value = None
self.get_interfaces_config.return_value = None
self.load_config.return_value = None
def test_ospf_absent_no_change(self):
set_module_args(dict(ospf=3, state='absent'))
self.execute_module(changed=False)
def test_ospf_present_no_change(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=[interface]))
self.execute_module(changed=False)
def test_ospf_present_remove(self):
set_module_args(dict(ospf=2, state='absent'))
commands = ['no router ospf 2']
self.execute_module(changed=True, commands=commands)
def test_ospf_change_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.5',
interfaces=[interface]))
commands = ['router ospf 2', 'router-id 10.2.3.5', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_remove_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, interfaces=[interface]))
commands = ['router ospf 2', 'no router-id', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_add_interface(self):
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Loopback 2', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['interface loopback 2 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
def test_ospf_remove_interface(self):
set_module_args(dict(ospf=2, router_id='10.2.3.4'))
commands = ['interface loopback 1 no ip ospf area']
self.execute_module(changed=True, commands=commands)
def test_ospf_add(self):
self._ospf_exists = False
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Vlan 210', area='0.0.0.0'),
dict(name='Eth1/1', area='0.0.0.0'),
dict(name='Po1', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['router ospf 2', 'router-id 10.2.3.4', 'exit',
'interface loopback 1 ip ospf area 0.0.0.0',
'interface vlan 210 ip ospf area 0.0.0.0',
'interface ethernet 1/1 ip ospf area 0.0.0.0',
'interface port-channel 1 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
| gpl-3.0 | 6,812,963,723,654,281,000 | 41.396226 | 92 | 0.607254 | false |
moreati/django-allauth | allauth/socialaccount/providers/twitch/views.py | 62 | 1037 | import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import TwitchProvider
class TwitchOAuth2Adapter(OAuth2Adapter):
provider_id = TwitchProvider.id
access_token_url = 'https://api.twitch.tv/kraken/oauth2/token'
authorize_url = 'https://api.twitch.tv/kraken/oauth2/authorize'
profile_url = 'https://api.twitch.tv/kraken/user'
def complete_login(self, request, app, token, **kwargs):
resp = requests.get(self.profile_url,
params={'oauth_token': token.token})
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(TwitchOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(TwitchOAuth2Adapter)
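# Editor's note -- illustrative wiring sketch, not part of django-allauth
# itself; exact project configuration may differ. Enabling this provider
# typically involves adding it to INSTALLED_APPS and registering a SocialApp
# (client id/secret from the Twitch developer console), e.g.:
#
#     INSTALLED_APPS += ['allauth.socialaccount.providers.twitch']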
| mit | -8,457,046,543,988,817,000 | 40.48 | 77 | 0.605593 | false |
papaloizouc/battlehack2014 | website/spameggs/spameggs/settings.py | 1 | 2945 | """
Django settings for spameggs project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ue(sv)*=^m!z&a!8t(f1&zvf__mpvi(jck+0w$%uo_07_k69x!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TOP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
def here(*args):
return os.path.realpath(os.path.join(TOP_DIR, *args))
TEMPLATE_DIRS = (
here('theapp/templates'),
)
STATICFILES_DIRS = (
here('theapp/static'),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'theapp',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'spameggs.urls'
WSGI_APPLICATION = 'spameggs.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'battlehack',
'USER': 'battlehack',
'PASSWORD': 'battlehack',
'HOST': 'localhost',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
foo = {}
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'propagate': True,
'level': 'DEBUG',
}
},
}
X_FRAME_OPTIONS = 'DENY'
AUTH_USER_MODEL = 'theapp.User'
| gpl-2.0 | 7,750,703,412,366,322,000 | 21.653846 | 72 | 0.663158 | false |
sugarlabs/sugar | src/jarabe/model/notifications.py | 4 | 4491 | # Copyright (C) 2008 One Laptop Per Child
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
import dbus
from sugar3 import dispatch
from jarabe import config
_DBUS_SERVICE = 'org.freedesktop.Notifications'
_DBUS_IFACE = 'org.freedesktop.Notifications'
_DBUS_PATH = '/org/freedesktop/Notifications'
_instance = None
class NotificationService(dbus.service.Object):
def __init__(self):
bus = dbus.SessionBus()
bus_name = dbus.service.BusName(_DBUS_SERVICE, bus=bus)
dbus.service.Object.__init__(self, bus_name, _DBUS_PATH)
self._notification_counter = 0
self.notification_received = dispatch.Signal()
self.notification_cancelled = dispatch.Signal()
self._buffer = {}
self.buffer_cleared = dispatch.Signal()
def retrieve_by_name(self, name):
if name in self._buffer:
return self._buffer[name]
return None
def clear_by_name(self, name):
if name in self._buffer:
del self._buffer[name]
self.buffer_cleared.send(self, app_name=name)
@dbus.service.method(_DBUS_IFACE,
in_signature='susssava{sv}i', out_signature='u')
def Notify(self, app_name, replaces_id, app_icon, summary, body, actions,
hints, expire_timeout):
logging.debug('Received notification: %r',
[app_name, replaces_id,
'<app_icon>', summary, body, actions, '<hints>',
expire_timeout])
if replaces_id > 0:
notification_id = replaces_id
else:
if self._notification_counter == sys.maxsize:
self._notification_counter = 1
else:
self._notification_counter += 1
notification_id = self._notification_counter
if app_name not in self._buffer:
self._buffer[app_name] = []
self._buffer[app_name].append({'app_name': app_name,
'replaces_id': replaces_id,
'app_icon': app_icon,
'summary': summary,
'body': body,
'actions': actions,
'hints': hints,
'expire_timeout': expire_timeout})
self.notification_received.send(self,
app_name=app_name,
replaces_id=replaces_id,
app_icon=app_icon,
summary=summary,
body=body,
actions=actions,
hints=hints,
expire_timeout=expire_timeout)
return notification_id
@dbus.service.method(_DBUS_IFACE, in_signature='u', out_signature='')
def CloseNotification(self, notification_id):
self.notification_cancelled.send(self, notification_id=notification_id)
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='as')
def GetCapabilities(self):
return []
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='sss')
def GetServerInformation(self, name, vendor, version):
return 'Sugar Shell', 'Sugar', config.version
@dbus.service.signal(_DBUS_IFACE, signature='uu')
def NotificationClosed(self, notification_id, reason):
pass
@dbus.service.signal(_DBUS_IFACE, signature='us')
def ActionInvoked(self, notification_id, action_key):
pass
def get_service():
global _instance
if not _instance:
_instance = NotificationService()
return _instance
def init():
get_service()
| gpl-3.0 | -3,469,826,087,039,333,000 | 34.085938 | 79 | 0.56758 | false |
giorgiop/scikit-learn | doc/tutorial/text_analytics/solutions/exercise_01_language_train_model.py | 73 | 2264 | """Build a language detector model
The goal of this exercise is to train a linear classifier on text features
that represent sequences of up to 3 consecutive characters so as to be
recognize natural languages by using the frequencies of short character
sequences as 'fingerprints'.
"""
# Author: Olivier Grisel <[email protected]>
# License: Simplified BSD
import sys
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import Perceptron
from sklearn.pipeline import Pipeline
from sklearn.datasets import load_files
from sklearn.model_selection import train_test_split
from sklearn import metrics
# The training data folder must be passed as first argument
languages_data_folder = sys.argv[1]
dataset = load_files(languages_data_folder)
# Split the dataset in training and test set:
docs_train, docs_test, y_train, y_test = train_test_split(
dataset.data, dataset.target, test_size=0.5)
# TASK: Build a vectorizer that splits strings into sequences of 1 to 3
# characters instead of word tokens
vectorizer = TfidfVectorizer(ngram_range=(1, 3), analyzer='char',
use_idf=False)
# TASK: Build a vectorizer / classifier pipeline using the previous analyzer
# the pipeline instance should be stored in a variable named clf
clf = Pipeline([
('vec', vectorizer),
('clf', Perceptron()),
])
# TASK: Fit the pipeline on the training set
clf.fit(docs_train, y_train)
# TASK: Predict the outcome on the testing set in a variable named y_predicted
y_predicted = clf.predict(docs_test)
# Print the classification report
print(metrics.classification_report(y_test, y_predicted,
target_names=dataset.target_names))
# Plot the confusion matrix
cm = metrics.confusion_matrix(y_test, y_predicted)
print(cm)
#import matplotlib.pyplot as plt
#plt.matshow(cm, cmap=plt.cm.jet)
#plt.show()
# Predict the result on some short new sentences:
sentences = [
u'This is a language detection test.',
u'Ceci est un test de d\xe9tection de la langue.',
u'Dies ist ein Test, um die Sprache zu erkennen.',
]
predicted = clf.predict(sentences)
for s, p in zip(sentences, predicted):
print(u'The language of "%s" is "%s"' % (s, dataset.target_names[p]))
| bsd-3-clause | -4,758,878,166,487,475,000 | 31.342857 | 78 | 0.735424 | false |
sarakha63/persomov | libs/git/ref.py | 110 | 2981 | # Copyright (c) 2009, Rotem Yaari <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Rotem Yaari ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Rotem Yaari BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Ref(object):
def __init__(self, repo, name):
super(Ref, self).__init__()
self.repo = repo
self.name = name
def getHead(self):
return self.repo._getCommitByRefName(self.name)
def getNormalizedName(self):
return self.name
def getNewCommits(self, comparedTo, limit = ""):
returned = []
command = "cherry %s %s %s" % (self.repo._normalizeRefName(comparedTo),
self.getNormalizedName(),
self.repo._normalizeRefName(limit))
for line in self.repo._getOutputAssertSuccess(command).splitlines():
symbol, sha = line.split()
if symbol == '-':
#already has an equivalent commit
continue
returned.append(self.repo._getCommitByHash(sha.strip()))
return returned
def __eq__(self, ref):
return (type(ref) is type(self) and ref.name == self.name)
def __ne__(self, ref):
return not (self == ref)
def __repr__(self):
return "<%s %s>" % (type(self).__name__, self.getNormalizedName())
################################## Containment #################################
def getMergeBase(self, other):
return self.repo.getMergeBase(self, other)
__and__ = getMergeBase
def contains(self, other):
return self.getMergeBase(other) == other
__contains__ = contains
| gpl-3.0 | 2,606,914,636,875,828,000 | 49.525424 | 84 | 0.651459 | false |
RevolutionMC/Revolution | plugin.video.PsychoTV/resources/lib/libraries/f4mproxy/f4mDownloader.py | 55 | 38147 | import xml.etree.ElementTree as etree
import base64
from struct import unpack, pack
import sys
import io
import os
import time
import itertools
import xbmcaddon
import xbmc
import urllib2,urllib
import traceback
import urlparse
import posixpath
import re
import hmac
import hashlib
import binascii
import zlib
from hashlib import sha256
import cookielib
#import youtube_dl
#from youtube_dl.utils import *
addon_id = 'script.video.F4mProxy'
selfAddon = xbmcaddon.Addon()
__addonname__ = selfAddon.getAddonInfo('name')
__icon__ = selfAddon.getAddonInfo('icon')
downloadPath = xbmc.translatePath(selfAddon.getAddonInfo('profile'))#selfAddon["profile"])
F4Mversion=''
#from Crypto.Cipher import AES
value_unsafe = '%+&;#'
VALUE_SAFE = ''.join(chr(c) for c in range(33, 127)
if chr(c) not in value_unsafe)
def urlencode_param(value):
"""Minimal URL encoding for query parameter"""
return urllib.quote_plus(value, safe=VALUE_SAFE)
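# Note on the helper above: quote_plus keeps every character in VALUE_SAFE
# as-is, turns spaces into '+' and percent-encodes the value_unsafe characters,
# e.g. urlencode_param('a b+c') yields 'a+b%2Bc'.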
class FlvReader(io.BytesIO):
"""
Reader for Flv files
The file format is documented in https://www.adobe.com/devnet/f4v.html
"""
# Utility functions for reading numbers and strings
def read_unsigned_long_long(self):
return unpack('!Q', self.read(8))[0]
def read_unsigned_int(self):
return unpack('!I', self.read(4))[0]
def read_unsigned_char(self):
return unpack('!B', self.read(1))[0]
def read_string(self):
res = b''
while True:
char = self.read(1)
if char == b'\x00':
break
res+=char
return res
def read_box_info(self):
"""
Read a box and return the info as a tuple: (box_size, box_type, box_data)
"""
real_size = size = self.read_unsigned_int()
box_type = self.read(4)
header_end = 8
if size == 1:
real_size = self.read_unsigned_long_long()
header_end = 16
return real_size, box_type, self.read(real_size-header_end)
def read_asrt(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
quality_entry_count = self.read_unsigned_char()
quality_modifiers = []
for i in range(quality_entry_count):
quality_modifier = self.read_string()
quality_modifiers.append(quality_modifier)
segment_run_count = self.read_unsigned_int()
segments = []
#print 'segment_run_count',segment_run_count
for i in range(segment_run_count):
first_segment = self.read_unsigned_int()
fragments_per_segment = self.read_unsigned_int()
segments.append((first_segment, fragments_per_segment))
#print 'segments',segments
return {'version': version,
'quality_segment_modifiers': quality_modifiers,
'segment_run': segments,
}
def read_afrt(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
time_scale = self.read_unsigned_int()
quality_entry_count = self.read_unsigned_char()
quality_entries = []
for i in range(quality_entry_count):
mod = self.read_string()
quality_entries.append(mod)
fragments_count = self.read_unsigned_int()
#print 'fragments_count',fragments_count
fragments = []
for i in range(fragments_count):
first = self.read_unsigned_int()
first_ts = self.read_unsigned_long_long()
duration = self.read_unsigned_int()
if duration == 0:
discontinuity_indicator = self.read_unsigned_char()
else:
discontinuity_indicator = None
fragments.append({'first': first,
'ts': first_ts,
'duration': duration,
'discontinuity_indicator': discontinuity_indicator,
})
#print 'fragments',fragments
return {'version': version,
'time_scale': time_scale,
'fragments': fragments,
'quality_entries': quality_entries,
}
def read_abst(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
bootstrap_info_version = self.read_unsigned_int()
streamType=self.read_unsigned_char()#self.read(1) # Profile,Live,Update,Reserved
islive=False
if (streamType & 0x20) >> 5:
islive=True
print 'LIVE',streamType,islive
time_scale = self.read_unsigned_int()
current_media_time = self.read_unsigned_long_long()
smpteTimeCodeOffset = self.read_unsigned_long_long()
movie_identifier = self.read_string()
server_count = self.read_unsigned_char()
servers = []
for i in range(server_count):
server = self.read_string()
servers.append(server)
quality_count = self.read_unsigned_char()
qualities = []
        for i in range(quality_count):
            quality = self.read_string()
            qualities.append(quality)
drm_data = self.read_string()
metadata = self.read_string()
segments_count = self.read_unsigned_char()
#print 'segments_count11',segments_count
segments = []
for i in range(segments_count):
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'asrt'
segment = FlvReader(box_data).read_asrt()
segments.append(segment)
fragments_run_count = self.read_unsigned_char()
#print 'fragments_run_count11',fragments_run_count
fragments = []
for i in range(fragments_run_count):
# This info is only useful for the player, it doesn't give more info
# for the download process
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'afrt'
fragments.append(FlvReader(box_data).read_afrt())
return {'segments': segments,
'movie_identifier': movie_identifier,
'drm_data': drm_data,
'fragments': fragments,
},islive
def read_bootstrap_info(self):
"""
        Read the bootstrap information from the stream and return a tuple
        (info, is_live), where info is a dict with the following keys:
            segments: A list of dicts with the following keys
                segment_run: A list of (first_segment, fragments_per_segment) tuples
        and is_live indicates whether the bootstrap describes a live stream.
"""
total_size, box_type, box_data = self.read_box_info()
assert box_type == b'abst'
return FlvReader(box_data).read_abst()
def read_bootstrap_info(bootstrap_bytes):
return FlvReader(bootstrap_bytes).read_bootstrap_info()
def build_fragments_list(boot_info, startFromFregment=None, live=True):
""" Return a list of (segment, fragment) for each fragment in the video """
res = []
segment_run_table = boot_info['segments'][0]
#print 'segment_run_table',segment_run_table
# I've only found videos with one segment
#if len(segment_run_table['segment_run'])>1:
# segment_run_table['segment_run']=segment_run_table['segment_run'][-2:] #pick latest
frag_start = boot_info['fragments'][0]['fragments']
#print boot_info['fragments']
# sum(j for i, j in segment_run_table['segment_run'])
first_frag_number=frag_start[0]['first']
last_frag_number=frag_start[-1]['first']
if last_frag_number==0:
last_frag_number=frag_start[-2]['first']
endfragment=0
segment_to_start=None
for current in range (len(segment_run_table['segment_run'])):
seg,fregCount=segment_run_table['segment_run'][current]
#print 'segmcount',seg,fregCount
if (not live):
frag_end=last_frag_number
else:
frag_end=first_frag_number+fregCount-1
if fregCount>10000:
frag_end=last_frag_number
#if frag_end
segment_run_table['segment_run'][current]=(seg,fregCount,first_frag_number,frag_end)
if (not startFromFregment==None) and startFromFregment>=first_frag_number and startFromFregment<=frag_end:
segment_to_start=current
first_frag_number+=fregCount
print 'current status',segment_run_table['segment_run']
#if we have no index then take the last segment
if segment_to_start==None:
segment_to_start=len(segment_run_table['segment_run'])-1
#if len(segment_run_table['segment_run'])>2:
# segment_to_start=len(segment_run_table['segment_run'])-2;
if live:
startFromFregment=segment_run_table['segment_run'][-1][3]
# if len(boot_info['fragments'][0]['fragments'])>1: #go bit back
# startFromFregment= boot_info['fragments'][0]['fragments'][-1]['first']
else:
startFromFregment= boot_info['fragments'][0]['fragments'][0]['first'] #start from begining
#if len(boot_info['fragments'][0]['fragments'])>2: #go little bit back
# startFromFregment= boot_info['fragments'][0]['fragments'][-2]['first']
print 'startFromFregment',startFromFregment,boot_info,len(boot_info['fragments'][0]['fragments'])
#print 'segment_to_start',segment_to_start
for currentIndex in range (segment_to_start,len(segment_run_table['segment_run'])):
currentSegment=segment_run_table['segment_run'][currentIndex]
#print 'currentSegment',currentSegment
(seg,fregCount,frag_start,frag_end)=currentSegment
#print 'startFromFregment',startFromFregment,
if (not startFromFregment==None) and startFromFregment>=frag_start and startFromFregment<=frag_end:
frag_start=startFromFregment
#print 'frag_start',frag_start,frag_end
for currentFreg in range(frag_start,frag_end+1):
res.append((seg,currentFreg ))
print 'fragmentlist',res,boot_info
return res
#totalFrags=sum(j for i, j in segment_run_table['segment_run'])
#lastSegment=segment_run_table['segment_run'][-1]
#lastSegmentStart= lastSegment[0]
#lastSegmentFragCount = lastSegment[1]
#print 'totalFrags',totalFrags
#first_frag_number = frag_start[0]['first']
#startFragOfLastSegment= first_frag_number +totalFrags - lastSegmentFragCount
#for (i, frag_number) in zip(range(1, lastSegmentFragCount+1), itertools.count(startFragOfLastSegment)):
# res.append((lastSegmentStart,frag_number )) #this was i, i am using first segement start
#return res
#segment_run_entry = segment_run_table['segment_run'][0]
#print 'segment_run_entry',segment_run_entry,segment_run_table
#n_frags = segment_run_entry[1]
#startingPoint = segment_run_entry[0]
#fragment_run_entry_table = boot_info['fragments'][0]['fragments']
#frag_entry_index = 0
#first_frag_number = fragment_run_entry_table[0]['first']
#first_frag_number=(startingPoint*n_frags) -(n_frags)+1
#print 'THENUMBERS',startingPoint,n_frags,first_frag_number
#for (i, frag_number) in zip(range(1, n_frags+1), itertools.count(first_frag_number)):
# res.append((startingPoint,frag_number )) #this was i, i am using first segement start
#return res
def join(base,url):
join = urlparse.urljoin(base,url)
url = urlparse.urlparse(join)
path = posixpath.normpath(url[2])
return urlparse.urlunparse(
(url.scheme,url.netloc,path,url.params,url.query,url.fragment)
)
def _add_ns(prop):
#print 'F4Mversion',F4Mversion
return '{http://ns.adobe.com/f4m/%s}%s' %(F4Mversion, prop)
#class ReallyQuietDownloader(youtube_dl.FileDownloader):
# def to_screen(sef, *args, **kargs):
# pass
class F4MDownloader():
"""
A downloader for f4m manifests or AdobeHDS.
"""
outputfile =''
clientHeader=None
cookieJar=cookielib.LWPCookieJar()
def __init__(self):
self.init_done=False
def getUrl(self,url, ischunkDownloading=False):
try:
post=None
print 'url',url
openner = urllib2.build_opener(urllib2.HTTPHandler, urllib2.HTTPSHandler)
#cookie_handler = urllib2.HTTPCookieProcessor(self.cookieJar)
#openner = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
if post:
req = urllib2.Request(url, post)
else:
req = urllib2.Request(url)
ua_header=False
if self.clientHeader:
for n,v in self.clientHeader:
req.add_header(n,v)
if n=='User-Agent':
ua_header=True
if not ua_header:
req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0')
#response = urllib2.urlopen(req)
if self.proxy and ( (not ischunkDownloading) or self.use_proxy_for_chunks ):
req.set_proxy(self.proxy, 'http')
response = openner.open(req)
data=response.read()
return data
except:
print 'Error in getUrl'
traceback.print_exc()
return None
def _write_flv_header2(self, stream):
"""Writes the FLV header and the metadata to stream"""
# FLV header
stream.write(b'FLV\x01')
stream.write(b'\x01')
stream.write(b'\x00\x00\x00\x09')
# FLV File body
stream.write(b'\x00\x00\x00\x09')
def _write_flv_header(self, stream, metadata):
"""Writes the FLV header and the metadata to stream"""
# FLV header
stream.write(b'FLV\x01')
stream.write(b'\x05')
stream.write(b'\x00\x00\x00\x09')
# FLV File body
stream.write(b'\x00\x00\x00\x00')
# FLVTAG
if metadata:
stream.write(b'\x12') # Script data
stream.write(pack('!L',len(metadata))[1:]) # Size of the metadata with 3 bytes
stream.write(b'\x00\x00\x00\x00\x00\x00\x00')
stream.write(metadata)
# All this magic numbers have been extracted from the output file
# produced by AdobeHDS.php (https://github.com/K-S-V/Scripts)
stream.write(b'\x00\x00\x01\x73')
def init(self, out_stream, url, proxy=None,use_proxy_for_chunks=True,g_stopEvent=None, maxbitrate=0, auth=''):
try:
self.init_done=False
self.total_frags=0
self.init_url=url
self.clientHeader=None
self.status='init'
self.proxy = proxy
self.auth=auth
#self.auth="pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYzMDMxMTV+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxODA5MWVkYTQ4NDI3NjFjODhjOWQwY2QxNTk3YTI0MWQwOWYwNWI1N2ZmMDE0ZjcxN2QyMTVjZTJkNmJjMDQ%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DACF8A1E4467676C9BCE2721CA5EFF840BD6ED1780046954039373A3B0D942ADC&hdntl=exp=1406303115~acl=%2f*~data=hdntl~hmac=4ab96fa533fd7c40204e487bfc7befaf31dd1f49c27eb1f610673fed9ff97a5f&als=0,2,0,0,0,NaN,0,0,0,37,f,52293145.57,52293155.9,t,s,GARWLHLMHNGA,2.11.3,37&hdcore=2.11.3"
if self.auth ==None or self.auth =='None' :
self.auth=''
if self.proxy and len(self.proxy)==0:
self.proxy=None
self.use_proxy_for_chunks=use_proxy_for_chunks
self.out_stream=out_stream
self.g_stopEvent=g_stopEvent
self.maxbitrate=maxbitrate
if '|' in url:
sp = url.split('|')
url = sp[0]
self.clientHeader = sp[1]
self.clientHeader= urlparse.parse_qsl(self.clientHeader)
                print 'header received now url and headers are',url, self.clientHeader
self.status='init done'
self.url=url
#self.downloadInternal( url)
return self.preDownoload()
#os.remove(self.outputfile)
except:
traceback.print_exc()
self.status='finished'
return False
def preDownoload(self):
global F4Mversion
try:
self.seqNumber=0
            self.live=False #todo find if it's Live or not
man_url = self.url
url=self.url
print 'Downloading f4m manifest'
manifest = self.getUrl(man_url)#.read()
if not manifest:
return False
print len(manifest)
try:
print manifest
except: pass
self.status='manifest done'
#self.report_destination(filename)
#dl = ReallyQuietDownloader(self.ydl, {'continuedl': True, 'quiet': True, 'noprogress':True})
version_fine="xmlns=\".*?\/([0-9].*?)\""
F4Mversion =re.findall(version_fine, manifest)[0]
#print F4Mversion,_add_ns('media')
auth_patt='<pv-2.0>(.*?)<'
auth_obj =re.findall(auth_patt, manifest)
self.auth20=''
if auth_obj and len(auth_obj)>0:
self.auth20=auth_obj[0] #not doing anything for time being
print 'auth',self.auth,self.auth20
#quick for one example where the xml was wrong.
if '\"bootstrapInfoId' in manifest:
manifest=manifest.replace('\"bootstrapInfoId','\" bootstrapInfoId')
doc = etree.fromstring(manifest)
print doc
# Added the-one 05082014
# START
# Check if manifest defines a baseURL tag
baseURL_tag = doc.find(_add_ns('baseURL'))
if baseURL_tag != None:
man_url = baseURL_tag.text
url = man_url
self.url = url
print 'base url defined as: %s' % man_url
# END
try:
#formats = [(int(f.attrib.get('bitrate', -1)),f) for f in doc.findall(_add_ns('media'))]
formats=[]
for f in doc.findall(_add_ns('media')):
vtype=f.attrib.get('type', '')
if f.attrib.get('type', '')=='video' or vtype=='' :
formats.append([int(f.attrib.get('bitrate', -1)),f])
print 'format works',formats
except:
formats=[(int(0),f) for f in doc.findall(_add_ns('media'))]
#print 'formats',formats
formats = sorted(formats, key=lambda f: f[0])
if self.maxbitrate==0:
rate, media = formats[-1]
elif self.maxbitrate==-1:
rate, media = formats[0]
else: #find bitrate
brselected=None
rate, media=None,None
for r, m in formats:
if r<=self.maxbitrate:
rate, media=r,m
else:
break
if media==None:
rate, media = formats[-1]
dest_stream = self.out_stream
print 'rate selected',rate
self.metadata=None
try:
self.metadata = base64.b64decode(media.find(_add_ns('metadata')).text)
print 'metadata stream read done'#,media.find(_add_ns('metadata')).text
#self._write_flv_header(dest_stream, metadata)
#dest_stream.flush()
except: pass
# Modified the-one 05082014
# START
# url and href can be used interchangeably
# so if url attribute is not present
# check for href attribute
try:
mediaUrl=media.attrib['url']
except:
mediaUrl=media.attrib['href']
# END
# Added the-one 05082014
# START
# if media url/href points to another f4m file
if '.f4m' in mediaUrl:
sub_f4m_url = join(man_url,mediaUrl)
print 'media points to another f4m file: %s' % sub_f4m_url
print 'Downloading f4m sub manifest'
sub_manifest = self.getUrl(sub_f4m_url)#.read()
if not sub_manifest:
return False
print len(sub_manifest)
try:
print sub_manifest
except: pass
self.status='sub manifest done'
F4Mversion =re.findall(version_fine, sub_manifest)[0]
doc = etree.fromstring(sub_manifest)
print doc
media = doc.find(_add_ns('media'))
if media == None:
return False
try:
self.metadata = base64.b64decode(media.find(_add_ns('metadata')).text)
print 'metadata stream read done'
except: pass
try:
mediaUrl=media.attrib['url']
except:
mediaUrl=media.attrib['href']
# END
try:
bootStrapID = media.attrib['bootstrapInfoId']
except: bootStrapID='xx'
#print 'mediaUrl',mediaUrl
base_url = join(man_url,mediaUrl)#compat_urlparse.urljoin(man_url,media.attrib['url'])
if mediaUrl.endswith('/') and not base_url.endswith('/'):
base_url += '/'
self.base_url=base_url
bsArray=doc.findall(_add_ns('bootstrapInfo'))
print 'bootStrapID',bootStrapID
#bootStrapID='bootstrap_450'
bootstrap=self.getBootStrapWithId(bsArray,bootStrapID)
if bootstrap==None: #if not available then find any!
print 'bootStrapID NOT Found'
bootstrap=doc.findall(_add_ns('bootstrapInfo'))[0]
else:
print 'found bootstrap with id',bootstrap
#print 'bootstrap',bootstrap
bootstrapURL1=''
try:
bootstrapURL1=bootstrap.attrib['url']
except: pass
bootstrapURL=''
bootstrapData=None
queryString=None
if bootstrapURL1=='':
bootstrapData=base64.b64decode(doc.findall(_add_ns('bootstrapInfo'))[0].text)
#
else:
from urlparse import urlparse
queryString = urlparse(url).query
print 'queryString11',queryString
if len(queryString)==0: queryString=None
if queryString==None or '?' in bootstrap.attrib['url']:
bootstrapURL = join(man_url,bootstrap.attrib['url'])# take out querystring for later
queryString = urlparse(bootstrapURL).query
print 'queryString override',queryString
if len(queryString)==0:
queryString=None
if len(self.auth)>0:
bootstrapURL+='?'+self.auth
queryString=self.auth#self._pv_params('',self.auth20)#not in use
else:
print 'queryString!!',queryString
bootstrapURL = join(man_url,bootstrap.attrib['url'])+'?'+queryString
if len(self.auth)>0:
authval=self.auth#self._pv_params('',self.auth20)#not in use
bootstrapURL = join(man_url,bootstrap.attrib['url'])+'?'+authval
queryString=authval
print 'bootstrapURL',bootstrapURL
if queryString==None:
queryString=''
self.bootstrapURL=bootstrapURL
self.queryString=queryString
self.bootstrap, self.boot_info, self.fragments_list,self.total_frags=self.readBootStrapInfo(bootstrapURL,bootstrapData)
self.init_done=True
return True
except:
traceback.print_exc()
return False
def keep_sending_video(self,dest_stream, segmentToStart=None, totalSegmentToSend=0):
try:
self.status='download Starting'
self.downloadInternal(self.url,dest_stream,segmentToStart,totalSegmentToSend)
except:
traceback.print_exc()
self.status='finished'
def downloadInternal(self,url,dest_stream ,segmentToStart=None,totalSegmentToSend=0):
global F4Mversion
try:
#dest_stream = self.out_stream
queryString=self.queryString
print 'segmentToStart',segmentToStart
if self.live or segmentToStart==0 or segmentToStart==None:
print 'writing metadata'#,len(self.metadata)
self._write_flv_header(dest_stream, self.metadata)
dest_stream.flush()
#elif segmentToStart>0 and not self.live:
# self._write_flv_header2(dest_stream)
# dest_stream.flush()
url=self.url
bootstrap, boot_info, fragments_list,total_frags=(self.bootstrap, self.boot_info, self.fragments_list,self.total_frags)
print boot_info, fragments_list,total_frags
self.status='bootstrap done'
self.status='file created'
self.downloaded_bytes = 0
self.bytes_in_disk = 0
self.frag_counter = 0
start = time.time()
frags_filenames = []
self.seqNumber=0
if segmentToStart and not self.live :
self.seqNumber=segmentToStart
if self.seqNumber>=total_frags:
self.seqNumber=total_frags-1
#for (seg_i, frag_i) in fragments_list:
#for seqNumber in range(0,len(fragments_list)):
self.segmentAvailable=0
frameSent=0
while True:
#if not self.live:
# _write_flv_header2
if self.g_stopEvent and self.g_stopEvent.isSet():
return
seg_i, frag_i=fragments_list[self.seqNumber]
self.seqNumber+=1
frameSent+=1
name = u'Seg%d-Frag%d' % (seg_i, frag_i)
#print 'base_url',base_url,name
url = self.base_url + name
if queryString and '?' not in url:
url+='?'+queryString
elif '?' in self.base_url:
url = self.base_url.split('?')[0] + name+'?'+self.base_url.split('?')[1]
#print(url),base_url,name
#frag_filename = u'%s-%s' % (tmpfilename, name)
#success = dl._do_download(frag_filename, {'url': url})
print 'downloading....',url
success=False
urlTry=0
while not success and urlTry<5:
success = self.getUrl(url,True)
if not success: xbmc.sleep(300)
urlTry+=1
print 'downloaded',not success==None,url
if not success:
return False
#with open(frag_filename, 'rb') as down:
if 1==1:
down_data = success#down.read()
reader = FlvReader(down_data)
while True:
_, box_type, box_data = reader.read_box_info()
print 'box_type',box_type,len(box_data)
#if box_type == b'afra':
# dest_stream.write(box_data)
# dest_stream.flush()
# break
if box_type == b'mdat':
isDrm=True if ord(box_data[0])&1 else False
#print 'isDrm',isDrm,repr(box_data)
if 1==2 and isDrm:
print 'drm',repr(box_data[1:17])
box_data=box_data[17:]
dest_stream.write(box_data)
dest_stream.flush()
break
# Using the following code may fix some videos, but
# only in mplayer, VLC won't play the sound.
# mdat_reader = FlvReader(box_data)
# media_type = mdat_reader.read_unsigned_char()
# while True:
# if mdat_reader.read_unsigned_char() == media_type:
# if mdat_reader.read_unsigned_char() == 0x00:
# break
# dest_stream.write(pack('!B', media_type))
# dest_stream.write(b'\x00')
# dest_stream.write(mdat_reader.read())
# break
self.status='play'
if self.seqNumber==len(fragments_list) or (totalSegmentToSend>0 and frameSent==totalSegmentToSend):
if not self.live:
break
self.seqNumber=0
                #todo if the url is not available then get manifest and get the data again
total_frags=None
try:
bootstrap, boot_info, fragments_list,total_frags=self.readBootStrapInfo(self.bootstrapURL,None,updateMode=True,lastSegment=seg_i, lastFragement=frag_i)
except:
traceback.print_exc()
pass
if total_frags==None:
break
del self.downloaded_bytes
del self.frag_counter
except:
traceback.print_exc()
return
def getBootStrapWithId (self,BSarray, id):
try:
for bs in BSarray:
print 'compare val is ',bs.attrib['id'], 'id', id
if bs.attrib['id']==id:
print 'gotcha'
return bs
except: pass
return None
def readBootStrapInfo(self,bootstrapUrl,bootStrapData, updateMode=False, lastFragement=None,lastSegment=None):
try:
retries=0
while retries<=10:
if self.g_stopEvent and self.g_stopEvent.isSet():
return
if not bootStrapData:
bootStrapData =self.getUrl(bootstrapUrl)
if bootStrapData==None:
retries+=1
continue
#print 'bootstrapData',len(bootStrapData)
bootstrap = bootStrapData#base64.b64decode(bootStrapData)#doc.findall(_add_ns('bootstrapInfo'))[0].text)
#print 'boot stream read done'
boot_info,self.live = read_bootstrap_info(bootstrap)
#print 'boot_info read done',boot_info
newFragement=None
if not lastFragement==None:
newFragement=lastFragement+1
fragments_list = build_fragments_list(boot_info,newFragement,self.live)
total_frags = len(fragments_list)
#print 'fragments_list',fragments_list, newFragement
#print lastSegment
if updateMode and (len(fragments_list)==0 or ( newFragement and newFragement>fragments_list[0][1])):
#todo check lastFragement to see if we got valid data
print 'retrying......'
bootStrapData=None
retries+=1
xbmc.sleep(2000)
continue
return bootstrap, boot_info, fragments_list,total_frags
except:
traceback.print_exc()
def _pv_params(self, pvswf, pv):
"""Returns any parameters needed for Akamai HD player verification.
Algorithm originally documented by KSV, source:
http://stream-recorder.com/forum/showpost.php?p=43761&postcount=13
"""
pv="ZXhwPTE0MDYyODMxOTF+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPTgwNTA0N2E1Yjk5ZmFjMjMzMDY0N2MxMzkyNGM0MDNiYzY1YjZmYzgyYTZhMjYyZDIxNDdkZTExZjI1MzQ5ZDI=;hdntl=exp=1406283191~acl=%2f*~data=hdntl~hmac=b65dc0c5ae60570f105984f0cc5ec6ce3a51422a7a1442e09f55513718ba80bf"
(data, hdntl) = pv.split(";")
SWF_VERIFICATION_KEY = b"Genuine Adobe Flash Player 001"
#SWF_VERIFICATION_KEY=binascii.unhexlify("9b673b13fa4682ed14c3cfa5af5310274b514c4133e9b3a81e6e3aba009l2564")
SWF_VERIFICATION_KEY = binascii.unhexlify(b"BD938D5EE6D9F42016F9C56577B6FDCF415FE4B184932B785AB32BCADC9BB592")
swf = self.getUrl('http://www.wat.tv/images/v70/PlayerLite.swf',True)
#AKAMAIHD_PV_KEY = unhexlify(b"BD938D5EE6D9F42016F9C56577B6FDCF415FE4B184932B785AB32BCADC9BB592")
AKAMAIHD_PV_KEY = "9b673b13fa4682ed14c3cfa5af5310274b514c4133e9b3a81e6e3aba009l2564"
hash = hashlib.sha256()
hash.update(self.swfdecompress(swf))
hash = base64.b64encode(hash.digest()).decode("ascii")
print 'hash',hash
hash="96e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ="
print 'hash',hash
#data="ZXhwPTE0MDYyMDQ3NjB+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWEzMjBlZDI5YjI1MDkwN2ExODcyMTJlOWJjNGFlNGUzZjA3MTM3ODk1ZDk4NmI2ZDVkMzczNzNhYzNiNDgxOWU="
msg = "exp=9999999999~acl=%2f%2a~data={0}!{1}".format(data, hash)
auth = hmac.new(AKAMAIHD_PV_KEY, msg.encode("ascii"), sha256)
pvtoken = "{0}~hmac={1}".format(msg, auth.hexdigest())
# The "hdntl" parameter can be accepted as a cookie or passed in the
# query string, but the "pvtoken" parameter can only be in the query
# string
print 'pvtoken',pvtoken
#return "pvtoken={}&{}".format(
#urlencode_param(pvtoken), urlencode_param(hdntl))
params=urllib.urlencode({'pvtoken':pvtoken})+'&'+hdntl+'&hdcore=2.11.3'
#params='pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwNDMzOTN+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxMTk0ZDc4NDExMDYwNjZlNDI5OWU2NTc3ODA0Mzk0ODU5NGZiMDQ5Njk2OGNiYzJiOGU2OTI2MjIzMjczZTA%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D1BE9DEB8262AB4886A0CB9E8376D04652F015751B88DD3D2201DE463D9E47733&hdntl=exp=1406043393~acl=%2f*~data=hdntl~hmac=28d5e28f47b7b3821fafae0250ba37091f2fc66d1a9d39b76b925c423458c537'+'&hdcore=2.11.3'
#php AdobeHDS.php --manifest "http://nt1livhdsweb-lh.akamaihd.net/z/live_1@90590/manifest.f4m?hdnea=st=1405958620~exp=1405960420~acl=/*~hmac=5ca0d2521a99c897fb9ffaf6ed9c2e40e5d0300cdcdd9dfb7302d9e32a84f98d&hdcore=2.11.3&g=VQYTYCFRUDRA"
#params="pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwNDUwNDZ+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWYwYWQ5ZGQyNDJlYjdiYjQ2YmZhMzk3MjY3MzE0ZWZiOWVlYTY5MDMzYWE2ODM5ZDM1ZWVjMWM1ZDUzZTk3ZjA%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D9FCCB6BC90C17E8057EE52CD53DDF0C6D07B20638D68B8FFCE98ED74153AA960&hdntl=exp=1406045046~acl=%2f*~data=hdntl~hmac=11e323633ad708a11e57a91e8c685011292f42936f5f7f3b1cb0fb8d2266586a&als=0,2,0,0,0,NaN,0,0,0,52,f,52035079.57,52035089.9,t,s,VQYTYCFRUDRA,2.11.3,52&hdcore=2.11.3"
#--useragent "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0"
#+'&als=0,2,0,0,0,NaN,0,0,0,47,f,52018363.57,52018373.9,t,s,HPFXDUMCMNPG,2.11.3,47&hdcore=2.11.3'
params=params.replace('%2B','+')
params=params.replace('%2F','/')
#params='pvtoken=' +pvtoken+'&'+hdntl
#params = [("pvtoken", pvtoken)]
#params.extend(parse_qsl(hdntl, keep_blank_values=True))
#params='pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwMzc2Njl+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWZjYzY5OTVkYjE5ODIxYTJlNDM4YTdhMWNmZjMyN2RhNTViOWNhMWM4NjZhZjYxM2ZkNDI4MTMwNjU4MjFjMjM%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DFA3BCC1CF6466CAFFCC6EF5CB2855ED065F36687CBFCD11570B7D702F71F10A6&hdntl=exp=1406037669~acl=%2f*~data=hdntl~hmac=4ab5ad38849b952ae93721af7451936b4c5906258d575eda11e52a05f78c7d75&als=0,2,0,0,0,NaN,0,0,0,96,f,52027699.57,52027709.89,t,s,RUIDLGQGDHVH,2.11.3,90&hdcore=2.11.3'
#print '_pv_params params',params
print params
print "pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYyODMxOTF+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPTgwNTA0N2E1Yjk5ZmFjMjMzMDY0N2MxMzkyNGM0MDNiYzY1YjZmYzgyYTZhMjYyZDIxNDdkZTExZjI1MzQ5ZDI%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D47A2B2AA9570ECFB37966C884174D608D86A7DE2466DE7EB48A6F118A155BD80&hdntl=exp=1406283191~acl=%2f*~data=hdntl~hmac=b65dc0c5ae60570f105984f0cc5ec6ce3a51422a7a1442e09f55513718ba80bf"
return "pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYzMDMxMTV+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxODA5MWVkYTQ4NDI3NjFjODhjOWQwY2QxNTk3YTI0MWQwOWYwNWI1N2ZmMDE0ZjcxN2QyMTVjZTJkNmJjMDQ%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DACF8A1E4467676C9BCE2721CA5EFF840BD6ED1780046954039373A3B0D942ADC&hdntl=exp=1406303115~acl=%2f*~data=hdntl~hmac=4ab96fa533fd7c40204e487bfc7befaf31dd1f49c27eb1f610673fed9ff97a5f&als=0,2,0,0,0,NaN,0,0,0,37,f,52293145.57,52293155.9,t,s,GARWLHLMHNGA,2.11.3,37&hdcore=2.11.3"
return params
def swfdecompress(self,data):
if data[:3] == b"CWS":
data = b"F" + data[1:8] + zlib.decompress(data[8:])
return data
| gpl-2.0 | -6,673,248,678,854,322,000 | 43.049654 | 548 | 0.577791 | false |
bzennn/blog_flask | python/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py | 354 | 5544 | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
from .. import constants
from .._utils import default_etree
__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
Args:
treeType (str): the name of the tree type required (case-insensitive).
Supported values are:
- "dom": The xml.dom.minidom DOM implementation
- "etree": A generic walker for tree implementations exposing an
elementtree-like interface (known to work with
ElementTree, cElementTree and lxml.etree).
- "lxml": Optimized walker for lxml.etree
- "genshi": a Genshi stream
Implementation: A module implementing the tree type e.g.
xml.etree.ElementTree or cElementTree (Currently applies to the
"etree" tree type only).
"""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType == "dom":
from . import dom
treeWalkerCache[treeType] = dom.TreeWalker
elif treeType == "genshi":
from . import genshi
treeWalkerCache[treeType] = genshi.TreeWalker
elif treeType == "lxml":
from . import etree_lxml
treeWalkerCache[treeType] = etree_lxml.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
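def _example_usage():
    """Minimal illustrative sketch of the API described above (an addition for
    clarity, not part of the public interface): parse a document with the
    top-level html5lib package, assuming it is importable at call time, and
    iterate the tokens produced by the "etree" walker."""
    import html5lib
    document = html5lib.parse("<p>Hello <b>world</b></p>")
    walker = getTreeWalker("etree")
    return [token["type"] for token in walker(document)]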
def concatenateCharacterTokens(tokens):
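    # Merge each run of adjacent Characters/SpaceCharacters tokens into a
    # single Characters token so consumers see contiguous text data.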
pendingCharacters = []
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
pendingCharacters.append(token["data"])
else:
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
pendingCharacters = []
yield token
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
def pprint(walker):
"""Pretty printer for tree walkers"""
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
# tag name
if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
if token["namespace"] in constants.prefixes:
ns = constants.prefixes[token["namespace"]]
else:
ns = token["namespace"]
name = "%s %s" % (ns, token["name"])
else:
name = token["name"]
output.append("%s<%s>" % (" " * indent, name))
indent += 2
# attributes (sorted for consistent ordering)
attrs = token["data"]
for (namespace, localname), value in sorted(attrs.items()):
if namespace:
if namespace in constants.prefixes:
ns = constants.prefixes[namespace]
else:
ns = namespace
name = "%s %s" % (ns, localname)
else:
name = localname
output.append("%s%s=\"%s\"" % (" " * indent, name, value))
# self-closing
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
(" " * indent,
token["name"],
token["publicId"],
token["systemId"] if token["systemId"] else ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">""" %
(" " * indent,
token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>" % (" " * indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" " * indent,))
elif type == "Characters":
output.append("%s\"%s\"" % (" " * indent, token["data"]))
elif type == "SpaceCharacters":
assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
else:
raise ValueError("Unknown token type, %s" % type)
return "\n".join(output)
| gpl-3.0 | 8,496,672,513,950,690,000 | 37.769231 | 94 | 0.525072 | false |
chys87/pyCxxLookup | cxxlookup/expr.py | 1 | 28952 | #!/usr/bin/env python3
# vim: set ts=4 sts=4 sw=4 expandtab cc=80:
# Copyright (c) 2014, 2016, chys <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of chys <[email protected]> nor the names of other
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from . import utils
try:
from . import _speedups
except ImportError:
_speedups = None
# Signed types only allowed for intermediate values
# Unsigned: Number of bits
# Signed: Number of bits - 1
# E.g.: 31 = int32_t; 32 = uint32_t
def type_name(type):
'''
>>> type_name(7), type_name(32)
('int8_t', 'uint32_t')
'''
if (type & 1):
return 'int{}_t'.format(type + 1)
else:
return 'uint{}_t'.format(type)
def type_bytes(type):
'''
>>> list(map(type_bytes, [7, 8, 15, 16, 31, 32, 63, 64]))
[1, 1, 2, 2, 4, 4, 8, 8]
'''
return (type + 7) // 8
def const_type(value):
if value >= 2**16:
if value >= 2**32:
return 64
else:
return 32
elif value >= 2**8:
return 16
else:
return 8
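# For reference: const_type(200) == 8, const_type(0x1234) == 16 and
# const_type(2**32) == 64 (the smallest supported unsigned width that can
# hold the value).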
class ExprMeta(type):
"""This is for performance purpose.
Add IS_*** constants to Expr* classes to replace ininstance,
which turned out to be one of the bottlenecks of pyCxxLookup
>>> Expr.IS_CONST, Expr.IS_VAR, Expr.IS_RSHIFT
(False, False, False)
>>> ExprConst.IS_CONST, ExprConst.IS_VAR, ExprConst.IS_RSHIFT
(True, False, False)
>>> ExprVar.IS_CONST, ExprVar.IS_VAR, ExprVar.IS_RSHIFT
(False, True, False)
>>> ExprRShift.IS_CONST, ExprRShift.IS_VAR, ExprRShift.IS_RSHIFT
(False, False, True)
"""
def __new__(cls, name, bases, namespace, **kwds):
result = type.__new__(cls, name, bases, dict(namespace))
if name != 'Expr' and name.startswith('Expr'):
is_name = 'IS_' + name[4:].upper()
setattr(result, is_name, True)
setattr(Expr, is_name, False)
for extra_name in result.__dict__.get('IS_ALSO', ()):
is_name = 'IS_' + extra_name
setattr(result, is_name, True)
setattr(Expr, is_name, False)
return result
class Expr(metaclass=ExprMeta):
__slots__ = ()
def __str__(self):
raise NotImplementedError
def statics(self, vs):
return ''.join(filter(None, (x.statics(vs) for x in self.children)))
children = ()
rtype = None
@property
def optimized(self):
return self
def walk(self):
"""Recursively visit itself and all children."""
yield self
q = [self]
q_pop = q.pop
q_extend = q.extend
while q:
expr = q_pop()
children = expr.children
yield from children
q_extend(children)
def walk_tempvar(self):
"""Shortcut for filter(lambda x: x.IS_TEMPVAR, self.walk())
"""
return (x for x in self.walk() if x.IS_TEMPVAR)
def _complicated(self, threshold):
for expr in self.walk():
threshold -= 1
if not threshold:
return True
return False
def extract_subexprs(self, threshold, callback, allow_new):
for subexpr in self.children:
subexpr.extract_subexprs(threshold, callback, allow_new)
def __add__(self, r):
return Add(self, r)
def __mul__(self, r):
return ExprMul(self, exprize(r))
def __floordiv__(self, r):
return ExprDiv(self, exprize(r))
def __rfloordiv__(self, r):
return ExprDiv(exprize(r), self)
def __mod__(self, r):
return ExprMod(self, exprize(r))
def __rmod__(self, r):
return ExprMod(exprize(r), self)
def __sub__(self, r):
r = exprize(r)
if not r.IS_CONST:
return NotImplemented
return Add(self, -r)
def __and__(self, r):
return ExprAnd(self, exprize(r))
def __lshift__(self, r):
return ExprLShift(self, exprize(r))
def __rshift__(self, r):
return ExprRShift(self, exprize(r))
def __rlshift__(self, r):
return ExprLShift(exprize(r), self)
def __rrshift__(self, r):
return ExprRShift(exprize(r), self)
def __eq__(self, r):
return ExprCompare(self, '==', exprize(r))
def __ne__(self, r):
return ExprCompare(self, '!=', exprize(r))
def __lt__(self, r):
return ExprCompare(self, '<', exprize(r))
def __le__(self, r):
return ExprCompare(self, '<=', exprize(r))
def __gt__(self, r):
return ExprCompare(self, '>', exprize(r))
def __ge__(self, r):
return ExprCompare(self, '>=', exprize(r))
__radd__ = __add__
__rmul__ = __mul__
__rand__ = __and__
class ExprVar(Expr):
__slots__ = 'rtype',
def __init__(self, type):
self.rtype = type
class ExprFixedVar(ExprVar):
__slots__ = 'name',
def __init__(self, type, name):
super().__init__(type)
self.name = name
def __str__(self):
return self.name
class ExprTempVar(ExprVar):
__slots__ = 'var',
_name_cache = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
def __init__(self, type, var):
super().__init__(type)
self.var = var
@classmethod
def get_name(cls, var):
'''
>>> list(map(ExprTempVar.get_name, (0, 26, 52)))
['A', 'AA', 'BA']
'''
cache = cls._name_cache
try:
s = cache[var]
except IndexError:
cache += [None] * (var + 1 - len(cache))
else:
if s is not None:
return s
length = 1
expressible = 26
ind = var
while ind >= expressible:
length += 1
ind -= expressible
expressible *= 26
s = ''
for _ in range(length):
s = chr(ord('A') + (ind % 26)) + s
ind //= 26
cache[var] = s
return s
def __str__(self):
return self.get_name(self.var)
class ExprConst(Expr):
__slots__ = 'rtype', 'value'
def __init__(self, type, value, *, int=int):
self.rtype = type
self.value = int(value)
def __str__(self):
if -10 < self.value < 10:
value_s = str(self.value)
else:
value_s = hex(self.value)
if self.rtype < 64:
return value_s + 'u'
else:
return 'UINT64_C({})'.format(value_s)
def _complicated(self, threshold):
# Always assign 64-bit constant to a variable for readability.
return (self.rtype == 64)
@staticmethod
def combine(const_exprs):
"""Combine multiple ExprConst into one."""
const_value = 0
const_type = 32
for expr in const_exprs:
const_value += expr.value
const_type = max(const_type, expr.rtype)
if const_value == 0:
return None
else:
return ExprConst(const_type, const_value)
def __neg__(self):
return ExprConst(self.rtype, -self.value)
class ExprAdd(Expr):
def __init__(self, exprs, const, *, max=max, tuple=tuple):
assert const is None or const.IS_CONST
self.exprs = tuple(exprs)
self.const = const
rtype = max([x.rtype for x in self.children])
self.rtype = max(rtype, 31) # C type-promotion rule
def __str__(self):
res = ' + '.join(map(str, self.exprs))
if self.const:
const_value = self.const.value
if const_value >= 0:
res += ' + ' + str(self.const)
else:
res += ' - ' + str(ExprConst(self.const.rtype,
-const_value))
return '(' + res + ')'
@property
def children(self):
const = self.const
if const:
return self.exprs + (const,)
else:
return self.exprs
@utils.cached_property
def optimized(self):
exprs = []
const_exprs = []
if self.const:
const_exprs.append(self.const)
for expr in self.exprs:
expr = expr.optimized
if expr.IS_ADD:
exprs.extend(expr.exprs)
if expr.const:
const_exprs.append(expr.const)
elif expr.IS_CONST:
const_exprs.append(expr)
else:
exprs.append(expr)
const = ExprConst.combine(const_exprs)
# (a ? c1 : c2) + c3 ==> (a ? c1 + c3 : c2 + c3)
if const:
const_value = const.value
for i, expr in enumerate(exprs):
if expr.IS_COND and \
expr.exprT.IS_CONST and expr.exprF.IS_CONST and \
(min(expr.exprT.value, expr.exprF.value) + const_value
>= 0):
expr = ExprCond(
expr.cond,
ExprConst(self.rtype, expr.exprT.value + const_value),
ExprConst(self.rtype, expr.exprF.value + const_value))
exprs[i] = expr.optimized
const = None
break
self.exprs = exprs = tuple(exprs)
self.const = const
if len(exprs) == 1 and not const:
return exprs[0]
return self
def extract_subexprs(self, threshold, callback, allow_new):
exprs = []
for expr in self.exprs:
expr.extract_subexprs(threshold, callback, allow_new)
expr = callback(expr, allow_new and expr._complicated(threshold))
exprs.append(expr)
self.exprs = tuple(exprs)
if self.const:
self.const = callback(
self.const, allow_new and self.const._complicated(threshold))
class ExprBinary(Expr):
__slots__ = 'left', 'right', 'rtype'
def __init__(self, left, right, rtype=None, *, max=max):
self.left = left = left.optimized
self.right = right = right.optimized
self.rtype = rtype or max(31, left.rtype, right.rtype)
@property
def children(self):
return self.left, self.right
def extract_subexprs(self, threshold, callback, allow_new):
super().extract_subexprs(threshold, callback, allow_new)
self.left = callback(self.left,
allow_new and self.left._complicated(threshold))
self.right = callback(self.right,
allow_new and self.right._complicated(threshold))
class ExprShift(ExprBinary):
def __init__(self, left, right):
super().__init__(left, right, max(31, left.rtype))
class ExprLShift(ExprShift):
def __str__(self):
# Avoid the spurious 'u' after the constant
right = self.right
if right.IS_CONST:
if right.value in (1, 2, 3):
return '{} * {}'.format(self.left, 1 << right.value)
return '({} << {})'.format(self.left, right.value)
else:
return '({} << {})'.format(self.left, right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
if right_const and left.IS_CONST:
return ExprConst(self.rtype, left.value << right.value)
# "(a & c1) << c2" ==> (a << c2) & (c1 << c2) (where c2 <= 3)
# This takes advantage of x86's LEA instruction
if right_const and right.value <= 3 and \
left.IS_AND and \
left.right.IS_CONST:
expr_left = ExprLShift(left.left, right)
expr_right = ExprConst(left.right.rtype,
left.right.value << right.value)
return ExprAnd(expr_left, expr_right).optimized
# (cond ? c1 : c2) << c3 ==> (cond ? c1 << c3 : c2 << c3)
if right_const and \
left.IS_COND and \
left.exprT.IS_CONST and \
left.exprF.IS_CONST:
expr = ExprCond(left.cond,
ExprLShift(left.exprT, right),
ExprLShift(left.exprF, right))
return expr.optimized
# (a >> c1) << c2
# (a << (c2 - c1)) & ~((1 << c2) - 1) (c2 > c1)
# (a >> (c1 - c2)) & ~((1 << c2) - 1) (c2 <= c1)
if right_const and \
left.IS_RSHIFT and \
left.right.IS_CONST:
c2 = right.value
c1 = left.right.value
if c2 > c1:
expr = ExprLShift(left.left, ExprConst(32, c2 - c1))
elif c2 == c1:
expr = left.left
else:
expr = ExprRShift(left.left, ExprConst(32, c1 - c2))
and_value = ((1 << c2) - 1) ^ ((1 << expr.rtype) - 1)
expr = ExprAnd(expr, Const(expr.rtype, and_value))
return expr.optimized
# "(a + c1) << c2" ==> (a << c2) + (c1 << c2)
if right_const and \
left.IS_ADD and len(left.exprs) == 1 and \
left.const:
expr_left = ExprLShift(left.exprs[0], right)
expr_right = ExprConst(left.const.rtype,
left.const.value << right.value)
return ExprAdd((expr_left,), expr_right).optimized
return self
class ExprRShift(ExprShift):
def __init__(self, left, right):
# Always logical shift
if left.rtype < 32 or left.rtype == 63:
left = ExprCast(max(32, left.rtype + 1), left)
super().__init__(left, right)
def __str__(self):
# Avoid the spurious 'u' after the constant
right = self.right
if right.IS_CONST:
right_s = str(right.value)
else:
right_s = str(right)
return '({} >> {})'.format(self.left, right_s)
@utils.cached_property
def optimized(self):
'''
>>> expr = (Add(FixedVar(32, 'c'), 30) >> 2)
>>> str(expr.optimized)
'(((c + 2u) >> 2) + 7u)'
>>> expr = (Add(FixedVar(32, 'c'), FixedVar(32, 'd'), -30) >> 2)
>>> str(expr.optimized)
'(((c + d + 2u) >> 2) - 8u)'
'''
left = self.left
right = self.right
right_const = right.IS_CONST
# (a + c1) >> c2
# Convert to ((a + c1 % (1 << c2)) >> c2) + (c1 >> c2).
if right_const and left.IS_ADD and left.const:
ctype = left.const.rtype
c1 = left.const.value
c2 = right.value
if c1 >> c2:
compensation = c1 >> c2
remainder = c1 - (compensation << c2)
if remainder < 0:
compensation += 1
remainder -= 1 << c2
expr = ExprAdd(left.exprs, ExprConst(ctype, remainder))
expr = ExprRShift(expr, ExprConst(32, c2))
expr = ExprAdd((expr,), ExprConst(ctype, compensation))
return expr.optimized
# (a >> c1) >> c2 ==> a >> (c1 + c2)
if right_const and \
left.IS_RSHIFT and \
left.right.IS_CONST:
self.right = right = Add(right, left.right).optimized
self.left = left = left.left
return self
class ExprMul(ExprBinary):
def __str__(self):
return '({} * {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
# Both constants
if right_const and left.IS_CONST:
return ExprConst(self.rtype,
left.value * right.value)
# Put constant on the right side
if not right_const and left.IS_CONST:
self.left, self.right = left, right = right, left
right_const = True
if right_const:
# Strength reduction (* => <<)
rv = right.value
if rv == 0:
return ExprConst(32, 0)
elif rv == 1:
return left
elif (rv > 0) and (rv & (rv - 1)) == 0: # Power of 2
expr = ExprLShift(left, ExprConst(32, rv.bit_length() - 1))
return expr.optimized
# (a + c1) * c2 ==> (a * c2 + c1 * c2)
if left.IS_ADD and len(left.exprs) == 1 and left.const:
expr_left = ExprMul(left.exprs[0], right)
expr_right = ExprMul(left.const, right)
return ExprAdd((expr_left, expr_right), None).optimized
# (cond ? c1 : c2) * c3 ==> (cond ? c1 * c3 : c2 * c3)
if left.IS_COND and \
left.exprT.IS_CONST and \
left.exprF.IS_CONST:
expr = ExprCond(left.cond,
ExprMul(left.exprT, right),
ExprMul(left.exprF, right))
return expr.optimized
# (a & 1) * c ==> (a & 1) ? c : 0
if left.IS_AND and \
left.right.IS_CONST and \
left.right.value == 1:
expr = ExprCond(left, right, ExprConst(self.rtype, 0))
return expr.optimized
return self
class ExprDiv(ExprBinary):
IS_ALSO = 'DIV_MOD',
def __init__(self, left, right):
if left.rtype < 32 or left.rtype == 63:
left = ExprCast(max(32, left.rtype + 1), left)
super().__init__(left, right)
def __str__(self):
return '({} / {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
if right.IS_CONST:
rv = right.value
if rv == 0:
raise ZeroDivisionError
elif rv == 1:
return left
elif (rv & (rv - 1)) == 0:
expr = ExprRShift(left, ExprConst(32, rv.bit_length() - 1))
return expr.optimized
return self
class ExprMod(ExprBinary):
IS_ALSO = 'DIV_MOD',
def __str__(self):
return '({} % {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
right = self.right
if right.IS_CONST:
value = right.value
if value and (value & (value - 1)) == 0:
return ExprAnd(self.left,
ExprConst(right.rtype, value - 1)).optimized
return self
class ExprAnd(ExprBinary):
def __str__(self):
return '({} & {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
right_value = None
if right_const:
right_value = right.value
# (a + c1) & c2 ==> (a + c1') & c2
# where c1' = c1 with high bits cleared
if right_const and right_value and \
left.IS_ADD and left.const:
rv = right_value
bt = rv.bit_length() + 1
c1 = left.const.value
c1p = c1 & ((1 << bt) - 1)
# If its high bit is set, make it negative
if c1p & (1 << (bt - 1)):
c1p |= ~((1 << bt) - 1)
if c1p != c1:
left = ExprAdd(left.exprs, ExprConst(left.const.rtype, c1p))
self.left = left = left.optimized
# (a & c1) & c2 ==> a & (c1 & c2)
if right_const and \
left.IS_AND and \
left.right.IS_CONST:
c1 = left.right.value
c2 = right_value
expr = ExprAnd(left.left,
Const(max(left.right.rtype, right.rtype), c1 & c2))
return expr.optimized
# (a & 0xff) ==> (uint8_t)a
# (a & 0xffff) ==> (uint16_t)a
if right_const:
# Must cast back
if right_value == 0xff:
expr = ExprCast(self.rtype, ExprCast(8, left))
return expr.optimized
elif right_value == 0xffff:
expr = ExprCast(self.rtype, ExprCast(16, left))
return expr.optimized
return self
class ExprCompare(ExprBinary):
def __init__(self, left, compare, right):
super().__init__(left, right, 31)
self.compare = compare
def __str__(self):
return '({} {} {})'.format(self.left, self.compare, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
# (a >> c1) == c2
# a >= (c2 << c1) && a < ((c2 + 1) << c1)
        # unsigned(a - (c2 << c1)) < (1 << c1)
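        # Added note (illustrative worked case): with c1 = 4 and c2 = 3,
        # "(a >> 4) == 3" holds exactly when a is in [48, 64), i.e. when
        # uint32_t(a - 48) < 16; the branch below builds that form by adding
        # -(c2 << c1) and comparing against 1 << c1.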
if right_const and self.compare == '==' and \
left.IS_RSHIFT and \
left.left.rtype == 32 and \
left.right.IS_CONST and \
right.rtype == 32:
c1 = left.right.value
c2 = right.value
if ((c2 + 1) << c1) <= 2**32:
expr = ExprAdd((left.left,), ExprConst(32, -(c2 << c1)))
expr = ExprCompare(expr, '<', ExprConst(32, 1 << c1))
return expr.optimized
# (a >> c1) < c2
# a < (c2 << c1)
if right_const and self.compare == '<' and \
left.IS_RSHIFT and \
left.left.rtype == 32 and \
left.right.IS_CONST and \
right.rtype == 32:
c1 = left.right.value
c2 = right.value
if (c2 << c1) < 2**32:
expr = ExprCompare(left.left, '<', ExprConst(32, c2 << c1))
return expr.optimized
return self
class ExprCast(Expr):
def __init__(self, type, value):
self.rtype = type
self.value = value.optimized
def __str__(self):
return '{}({})'.format(type_name(self.rtype),
utils.trim_brackets(str(self.value)))
@property
def children(self):
return self.value,
@utils.cached_property
def optimized(self):
rtype = self.rtype
value = self.value
if value.rtype == rtype:
return value
if value.IS_CAST and rtype <= value.rtype:
return ExprCast(rtype, value.value).optimized
return self
class ExprCond(Expr):
__slots__ = 'cond', 'exprT', 'exprF', 'rtype'
def __init__(self, cond, exprT, exprF):
self.cond = cond.optimized
self.exprT = exprT.optimized
self.exprF = exprF.optimized
self.rtype = max(31, self.exprT.rtype, self.exprF.rtype)
def __str__(self):
return '({} ? {} : {})'.format(self.cond, self.exprT, self.exprF)
@property
def children(self):
return self.cond, self.exprT, self.exprF
def extract_subexprs(self, threshold, callback, allow_new):
# It can be unsafe to evaluate exprT or exprF without first checking
# cond
if not self.cond.IS_VAR:
self.cond.extract_subexprs(threshold, callback, allow_new)
self.cond = callback(
self.cond, allow_new and self.cond._complicated(threshold))
if not allow_new:
self.exprT = callback(self.exprT, False)
self.exprF = callback(self.exprF, False)
self.exprT.extract_subexprs(threshold, callback, False)
self.exprF.extract_subexprs(threshold, callback, False)
class ExprTable(Expr):
def __init__(self, type, name, values, var, offset):
self.rtype = type
self.name = name
self.values = values
self.var = var = var.optimized
self.offset = offset
def __str__(self):
if self.offset > 0:
# Add an extra 'l' so that the constant is absorbed by the
# address of the array
offset_s = '{:#x}'.format(self.offset)
if self.var.rtype < 63:
offset_s += 'l'
return '{}[{} - {}]'.format(
self.name, self.var, offset_s)
elif self.offset < 0:
# Don't add 'l' in this case, to avoid signed/unsigned
# extension problems
return '{}[{} + {:#x}]'.format(self.name,
self.var, -self.offset)
else:
var = utils.trim_brackets(str(self.var))
return '{}[{}]'.format(self.name, var)
def statics(self, vs):
id_ = id(self)
if id_ in vs:
return ''
vs.add(id_)
var_statics = self.var.statics(vs)
if _speedups:
c_array = _speedups.format_c_array(
self.values, self.rtype, self.name)
if c_array is not None:
return var_statics + c_array
res = [var_statics]
res_append = res.append
indlen = len(hex(self.values.size))
maxlen = len(hex(utils.np_max(self.values)))
# I understand this is not the "correct" way to go, but this is
# for performance.
# If I don't care about performance, I could do '{:#0{}x}'.format(v, l)
line_start_format = ' /* {{:#0{}x}} */'.format(indlen).format
value_format = ' {{:#0{}x}},'.format(maxlen).format
line = 'alignas({type}) const {type} {name}[{size:#x}] = {{'.format(
type=type_name(self.rtype), name=self.name, size=self.values.size)
for i, v in enumerate(self.values):
if not (i & 7):
res_append(line + '\n')
line = line_start_format(i)
line += value_format(v)
res_append(line.rstrip(',') + '\n')
res_append('};\n\n')
return ''.join(res)
@property
def children(self):
return self.var,
@utils.cached_property
def optimized(self):
var = self.var
# Absorb constants into offset
if var.IS_ADD and var.const:
self.offset -= var.const.value
self.var = var = ExprAdd(var.exprs, None).optimized
return self
def table_bytes(self, *, type_bytes=type_bytes):
return self.values.size * type_bytes(self.rtype)
def extract_subexprs(self, threshold, callback, allow_new):
super().extract_subexprs(threshold, callback, allow_new)
self.var = callback(self.var,
allow_new and self.var._complicated(threshold))
def _complicated(self, _threshold):
return True
### Factory functions
def exprize(expr, *,
isinstance=isinstance, Expr=Expr, ExprConst=ExprConst):
'''Convert int to ExprConst'''
if isinstance(expr, Expr):
return expr
else:
return ExprConst(32, expr)
FixedVar = ExprFixedVar
TempVar = ExprTempVar
Const = ExprConst
def Add(*in_exprs):
exprs = []
const_exprs = []
for expr in in_exprs:
expr = exprize(expr)
if expr.IS_CONST:
const_exprs.append(expr)
elif expr.IS_ADD:
exprs.extend(expr.exprs)
if expr.const:
const_exprs.append(expr.const)
else:
exprs.append(expr)
const_expr = ExprConst.combine(const_exprs)
if not exprs:
return const_expr or ExprConst(32, 0)
elif len(exprs) == 1 and not const_expr:
return exprs[0]
else:
return ExprAdd(exprs, const_expr)
def Cast(type, value):
return ExprCast(type, exprize(value))
def Cond(cond, exprT, exprF):
return ExprCond(exprize(cond), exprize(exprT), exprize(exprF))
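# Added usage sketch (illustrative; not part of the original module). The
# factory helpers accept plain Python ints and wrap them via exprize(), so
# constant terms fold as the tree is built, e.g.:
#   Add(Const(32, 3), 5)    # constant terms are combined into a single ExprConst
#   Cond(flag, 1, 0)        # "flag" stands for any previously built Expr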
| bsd-3-clause | 631,598,003,262,371,100 | 29.443743 | 79 | 0.522693 | false |
crosswalk-project/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_ro_style-src_cross-origin.py | 23 | 3064 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
_CSP = "style-src " + url1
response.headers.set("Content-Security-Policy", _CSP)
response.headers.set("X-Content-Security-Policy", _CSP)
response.headers.set("X-WebKit-CSP", _CSP)
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Samsung Electronics Co., Ltd.
Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors:
Ran, Wang <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_ro_style-src_cross-origin</title>
<link rel="author" title="Samsung" href="http://www.Samsung.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#style-src"/>
<meta name="flags" content=""/>
<meta name="assert" content='style-src """ + url1 + """'/>
<meta charset="utf-8"/>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<link rel="stylesheet" type="text/css" href='""" + url1 + """/tests/csp/support/canvas-index.css'/>
<link rel="stylesheet" type="text/css" href='""" + url2 + """/tests/csp/support/a-green.css'/>
<link rel="stylesheet" type="text/css" href="support/blue-100x100.css"/>
<style>
#test-green {
background-color: green;
}
</style>
</head>
<body>
<div id="log"></div>
<div id="test-blue"></div>
<h3>ext-css:""" + url1 + """/tests/csp/support/canvas-index.css</h3>
<div id="test-ext-a" class="a"></div>
<div id="test-green"></div>
<script>
test(function() {
var div = document.querySelector("#test-ext-a");
var fix = getComputedStyle(div)["color"];
assert_equals(fix, "rgb(0, 128, 0)", "style setted correctly");
}, document.title + "_blocked");
test(function() {
var div = document.querySelector("#test-blue");
var fix = getComputedStyle(div)["backgroundColor"];
assert_equals(fix, "rgb(0, 0, 255)", "style setted correctly");
}, document.title + "_blocked_int");
test(function() {
var div = document.querySelector("#test-green");
var fix = getComputedStyle(div)["backgroundColor"];
        assert_equals(fix, "rgb(0, 128, 0)", "style set correctly");
}, document.title + "_blocked_inline");
</script>
</body>
</html>"""
| bsd-3-clause | 8,381,908,714,418,443,000 | 38.792208 | 103 | 0.615862 | false |
liuwenf/moose | framework/contrib/nsiqcppstyle/rules/RULE_4_1_A_A_use_tab_for_indentation.py | 43 | 1770 | """
Use tabs for indentation.
This rule checks if each line starts with a space.
In addition, it suppresses the violation when the line contains only spaces and tabs.
== Violation ==
void Hello()
{
[SPACE][SPACE]Hello(); <== Violation. Spaces are used for indentation.
}
== Good ==
void Hello()
{
[TAB] <== Don't care if the line is empty
[TAB]Hello(); <== Good.
}
"""
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
def RunRule(lexer, line, lineno) :
if not Match("^\s*$", line) :
if Search("^ ", line) :
nsiqcppstyle_reporter.Error(DummyToken(lexer.filename, line, lineno, 0), __name__, "Do not use space for indent")
ruleManager.AddLineRule(RunRule)
###########################################################################################
# Unit Test
###########################################################################################
from nsiqunittest.nsiqcppstyle_unittestbase import *
class testRule(nct):
def setUpRule(self):
ruleManager.AddLineRule(RunRule)
def test1(self):
self.Analyze("test/thisFile.c",
"\tbool CanHave() {\n\t}")
assert not CheckErrorContent(__name__)
def test2(self):
self.Analyze("test/thisFile.c",
"""
class K {
Hello
}""")
assert CheckErrorContent(__name__)
def test3(self):
self.Analyze("test/thisFile.c",
"""
class K {
Hello
}""")
assert not CheckErrorContent(__name__)
def test4(self):
self.Analyze("test/thisFile.c",
"""
/**
* Check for Doxygen Comment. This rule doesn't care about doxygen comment block.
*/
class K {
Hello
}""")
assert not CheckErrorContent(__name__)
| lgpl-2.1 | 899,098,830,497,844,400 | 23.929577 | 125 | 0.566102 | false |
Omegaphora/external_deqp | android/scripts/common.py | 6 | 5274 | # -*- coding: utf-8 -*-
import os
import re
import sys
import shlex
import subprocess
import multiprocessing
class NativeLib:
def __init__ (self, apiVersion, abiVersion):
self.apiVersion = apiVersion
self.abiVersion = abiVersion
def getPlatform ():
if sys.platform.startswith('linux'):
return 'linux'
else:
return sys.platform
def selectByOS (variants):
platform = getPlatform()
if platform in variants:
return variants[platform]
elif 'other' in variants:
return variants['other']
else:
raise Exception("No configuration for '%s'" % platform)
def isExecutable (path):
return os.path.isfile(path) and os.access(path, os.X_OK)
def which (binName):
for path in os.environ['PATH'].split(os.pathsep):
path = path.strip('"')
fullPath = os.path.join(path, binName)
if isExecutable(fullPath):
return fullPath
return None
def isBinaryInPath (binName):
return which(binName) != None
def selectFirstExistingBinary (filenames):
for filename in filenames:
if filename != None and isExecutable(filename):
return filename
return None
def selectFirstExistingDir (paths):
for path in paths:
if path != None and os.path.isdir(path):
return path
return None
def die (msg):
print msg
exit(-1)
def shellquote(s):
return '"%s"' % s.replace('\\', '\\\\').replace('"', '\"').replace('$', '\$').replace('`', '\`')
def execute (commandLine):
args = shlex.split(commandLine)
retcode = subprocess.call(args)
if retcode != 0:
raise Exception("Failed to execute '%s', got %d" % (commandLine, retcode))
def execArgs (args):
retcode = subprocess.call(args)
if retcode != 0:
raise Exception("Failed to execute '%s', got %d" % (str(args), retcode))
class Device:
def __init__(self, serial, product, model, device):
self.serial = serial
self.product = product
self.model = model
self.device = device
def __str__ (self):
return "%s: {product: %s, model: %s, device: %s}" % (self.serial, self.product, self.model, self.device)
def getDevices (adb):
proc = subprocess.Popen([adb, 'devices', '-l'], stdout=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode != 0:
raise Exception("adb devices -l failed, got %d" % retcode)
ptrn = re.compile(r'^([a-zA-Z0-9]+)\s+.*product:([^\s]+)\s+model:([^\s]+)\s+device:([^\s]+)')
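	# Illustrative line this pattern is expected to match (example values):
	#   0123456789ABCDEF       device usb:1-2 product:hammerhead model:Nexus_5 device:hammerhead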
devices = []
for line in stdout.splitlines()[1:]:
if len(line.strip()) == 0:
continue
m = ptrn.match(line)
if m == None:
print "WARNING: Failed to parse device info '%s'" % line
continue
devices.append(Device(m.group(1), m.group(2), m.group(3), m.group(4)))
return devices
def getWin32Generator ():
if which("jom.exe") != None:
return "NMake Makefiles JOM"
else:
return "NMake Makefiles"
def isNinjaSupported ():
return which("ninja") != None
def getUnixGenerator ():
if isNinjaSupported():
return "Ninja"
else:
return "Unix Makefiles"
def getExtraBuildArgs (generator):
if generator == "Unix Makefiles":
return ["--", "-j%d" % multiprocessing.cpu_count()]
else:
return []
NDK_HOST_OS_NAMES = [
"windows",
"windows_x86-64",
"darwin-x86",
"darwin-x86-64",
"linux-x86",
"linux-x86_64"
]
def getNDKHostOsName (ndkPath):
for name in NDK_HOST_OS_NAMES:
if os.path.exists(os.path.join(ndkPath, "prebuilt", name)):
return name
raise Exception("Couldn't determine NDK host OS")
# deqp/android path
ANDROID_DIR = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
# Build configuration
NATIVE_LIBS = [
# API ABI
NativeLib(13, "armeabi-v7a"), # ARM v7a ABI
NativeLib(13, "x86"), # x86
NativeLib(21, "arm64-v8a"), # ARM64 v8a ABI
]
ANDROID_JAVA_API = "android-13"
NATIVE_LIB_NAME = "libdeqp.so"
# NDK paths
ANDROID_NDK_PATH = selectFirstExistingDir([
os.path.expanduser("~/android-ndk-r10c"),
"C:/android/android-ndk-r10c",
])
ANDROID_NDK_HOST_OS = getNDKHostOsName(ANDROID_NDK_PATH)
ANDROID_NDK_TOOLCHAIN_VERSION = "r10c" # Toolchain file is selected based on this
# Native code build settings
CMAKE_GENERATOR = selectByOS({
'win32': getWin32Generator(),
'other': getUnixGenerator()
})
EXTRA_BUILD_ARGS = getExtraBuildArgs(CMAKE_GENERATOR)
# SDK paths
ANDROID_SDK_PATH = selectFirstExistingDir([
os.path.expanduser("~/android-sdk-linux"),
os.path.expanduser("~/android-sdk-mac_x86"),
"C:/android/android-sdk-windows",
])
ANDROID_BIN = selectFirstExistingBinary([
os.path.join(ANDROID_SDK_PATH, "tools", "android"),
os.path.join(ANDROID_SDK_PATH, "tools", "android.bat"),
which('android'),
])
ADB_BIN = selectFirstExistingBinary([
which('adb'), # \note Prefer adb in path to avoid version issues on dev machines
os.path.join(ANDROID_SDK_PATH, "platform-tools", "adb"),
os.path.join(ANDROID_SDK_PATH, "platform-tools", "adb.exe"),
])
ZIPALIGN_BIN = selectFirstExistingBinary([
os.path.join(ANDROID_SDK_PATH, "tools", "zipalign"),
os.path.join(ANDROID_SDK_PATH, "tools", "zipalign.exe"),
which('zipalign'),
])
JARSIGNER_BIN = which('jarsigner')
# Apache ant
ANT_BIN = selectFirstExistingBinary([
which('ant'),
"C:/android/apache-ant-1.8.4/bin/ant.bat",
"C:/android/apache-ant-1.9.2/bin/ant.bat",
"C:/android/apache-ant-1.9.3/bin/ant.bat",
"C:/android/apache-ant-1.9.4/bin/ant.bat",
])
| apache-2.0 | 100,138,489,912,904,580 | 24.852941 | 106 | 0.675958 | false |
Nikoala/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/vidme.py | 36 | 2580 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
float_or_none,
str_to_int,
)
class VidmeIE(InfoExtractor):
_VALID_URL = r'https?://vid\.me/(?:e/)?(?P<id>[\da-zA-Z]+)'
_TEST = {
'url': 'https://vid.me/QNB',
'md5': 'f42d05e7149aeaec5c037b17e5d3dc82',
'info_dict': {
'id': 'QNB',
'ext': 'mp4',
'title': 'Fishing for piranha - the easy way',
'description': 'source: https://www.facebook.com/photo.php?v=312276045600871',
'duration': 119.92,
'timestamp': 1406313244,
'upload_date': '20140725',
'thumbnail': 're:^https?://.*\.jpg',
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r'<source src="([^"]+)"', webpage, 'video URL')
title = self._og_search_title(webpage)
description = self._og_search_description(webpage, default='')
thumbnail = self._og_search_thumbnail(webpage)
timestamp = int_or_none(self._og_search_property('updated_time', webpage, fatal=False))
width = int_or_none(self._og_search_property('video:width', webpage, fatal=False))
height = int_or_none(self._og_search_property('video:height', webpage, fatal=False))
duration = float_or_none(self._html_search_regex(
r'data-duration="([^"]+)"', webpage, 'duration', fatal=False))
view_count = str_to_int(self._html_search_regex(
r'<span class="video_views">\s*([\d,\.]+)\s*plays?', webpage, 'view count', fatal=False))
like_count = str_to_int(self._html_search_regex(
r'class="score js-video-vote-score"[^>]+data-score="([\d,\.\s]+)">',
webpage, 'like count', fatal=False))
comment_count = str_to_int(self._html_search_regex(
r'class="js-comment-count"[^>]+data-count="([\d,\.\s]+)">',
webpage, 'comment count', fatal=False))
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
'timestamp': timestamp,
'width': width,
'height': height,
'duration': duration,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
}
| gpl-3.0 | 8,308,384,121,911,167,000 | 36.941176 | 101 | 0.547674 | false |
lseyesl/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/comments.py | 148 | 2030 | # Copyright (c) 2009 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# A tool for automating dealing with bugzilla, posting patches, committing
# patches, etc.
from webkitpy.common.config import urls
def bug_comment_from_svn_revision(svn_revision):
return "Committed r%s: <%s>" % (svn_revision, urls.view_revision_url(svn_revision))
def bug_comment_from_commit_text(scm, commit_text):
svn_revision = scm.svn_revision_from_commit_text(commit_text)
return bug_comment_from_svn_revision(svn_revision)
| bsd-3-clause | -2,630,273,562,701,257,000 | 47.333333 | 87 | 0.773399 | false |
Daksh/sugar-toolkit-gtk3 | src/sugar3/graphics/palettemenu.py | 2 | 7416 | # Copyright 2012 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import GObject
from gi.repository import Gtk
from sugar3.graphics.icon import Icon
from sugar3.graphics import style
class PaletteMenuBox(Gtk.VBox):
def __init__(self):
Gtk.VBox.__init__(self)
def append_item(self, item_or_widget, horizontal_padding=None,
vertical_padding=None):
item = None
if (isinstance(item_or_widget, PaletteMenuItem) or
isinstance(item_or_widget, PaletteMenuItemSeparator)):
item = item_or_widget
else:
item = self._wrap_widget(item_or_widget, horizontal_padding,
vertical_padding)
self.pack_start(item, False, False, 0)
def _wrap_widget(self, widget, horizontal_padding, vertical_padding):
vbox = Gtk.VBox()
vbox.show()
if horizontal_padding is None:
horizontal_padding = style.DEFAULT_SPACING
if vertical_padding is None:
vertical_padding = style.DEFAULT_SPACING
hbox = Gtk.HBox()
vbox.pack_start(hbox, True, True, vertical_padding)
hbox.show()
hbox.pack_start(widget, True, True, horizontal_padding)
return vbox
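# Added usage sketch (illustrative; the label, icon name and widget below are
# placeholders, not part of this module):
#   box = PaletteMenuBox()
#   box.append_item(PaletteMenuItem('Resume', icon_name='activity-start'))
#   box.append_item(PaletteMenuItemSeparator())
#   box.append_item(some_plain_gtk_widget, horizontal_padding=0)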
class PaletteMenuItemSeparator(Gtk.EventBox):
"""Contains a HSeparator and has the proper height for the menu."""
__gtype_name__ = 'SugarPaletteMenuItemSeparator'
def __init__(self):
Gtk.EventBox.__init__(self)
separator = Gtk.HSeparator()
self.add(separator)
separator.show()
self.set_size_request(-1, style.DEFAULT_SPACING * 2)
class PaletteMenuItem(Gtk.EventBox):
__gtype_name__ = 'SugarPaletteMenuItem'
__gsignals__ = {
'activate': (GObject.SignalFlags.RUN_FIRST, None, [])
}
def __init__(self, text_label=None, icon_name=None, text_maxlen=60,
xo_color=None, file_name=None, accelerator=None):
"""
        text_label -- str
            the text to display in the menu.
        icon_name -- str
            the name of a sugar icon to be displayed. Takes precedence
            over file_name.
        text_maxlen -- int
            the desired maximum width of the label, in characters.
            The default is 60.
        xo_color -- sugar.graphics.XoColor
            the color to be applied to the icon.
        file_name -- str
            the path to an SVG file used as the icon.
        accelerator -- str
            the text used to display the keyboard shortcut associated
            with the menu item.
"""
Gtk.EventBox.__init__(self)
self.set_above_child(True)
self.icon = None
self._hbox = Gtk.HBox()
vbox = Gtk.VBox()
self.add(vbox)
vbox.show()
hbox = Gtk.HBox()
vbox.pack_start(hbox, True, True, style.DEFAULT_PADDING)
hbox.show()
hbox.pack_start(self._hbox, True, True, style.DEFAULT_PADDING)
if icon_name is not None:
self.icon = Icon(icon_name=icon_name,
pixel_size=style.SMALL_ICON_SIZE)
if xo_color is not None:
self.icon.props.xo_color = xo_color
self._hbox.pack_start(self.icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
elif file_name is not None:
self.icon = Icon(file=file_name,
pixel_size=style.SMALL_ICON_SIZE)
if xo_color is not None:
self.icon.props.xo_color = xo_color
self._hbox.pack_start(self.icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
align = Gtk.Alignment(xalign=0.0, yalign=0.5, xscale=0.0, yscale=0.0)
self.label = Gtk.Label(text_label)
if text_maxlen > 0:
self.label.set_max_width_chars(text_maxlen)
self.label.set_ellipsize(style.ELLIPSIZE_MODE_DEFAULT)
align.add(self.label)
self._hbox.pack_start(align, expand=True, fill=True,
padding=style.DEFAULT_PADDING)
self._accelerator_label = Gtk.AccelLabel('')
if accelerator is not None:
self._accelerator_label.set_text(accelerator)
self._hbox.pack_start(self._accelerator_label, expand=False,
fill=False, padding=style.DEFAULT_PADDING)
self.id_bt_release_cb = self.connect('button-release-event',
self.__button_release_cb)
self.id_enter_notify_cb = self.connect('enter-notify-event',
self.__enter_notify_cb)
self.id_leave_notify_cb = self.connect('leave-notify-event',
self.__leave_notify_cb)
self.show_all()
def __button_release_cb(self, widget, event):
alloc = self.get_allocation()
if 0 < event.x < alloc.width and 0 < event.y < alloc.height:
self.emit('activate')
def __enter_notify_cb(self, widget, event):
self.modify_bg(Gtk.StateType.NORMAL,
style.COLOR_BUTTON_GREY.get_gdk_color())
def __leave_notify_cb(self, widget, event):
self.modify_bg(Gtk.StateType.NORMAL,
style.COLOR_BLACK.get_gdk_color())
def set_label(self, text_label):
text = '<span foreground="%s">' % style.COLOR_WHITE.get_html() + \
text_label + '</span>'
self.label.set_markup(text)
def set_image(self, icon):
self._hbox.pack_start(icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
self._hbox.reorder_child(icon, 0)
def set_accelerator(self, text):
self._accelerator_label.set_text(text)
def set_sensitive(self, sensitive):
is_sensitive = bool(not self.get_state_flags() &
Gtk.StateFlags.INSENSITIVE)
if is_sensitive == sensitive:
return
if sensitive:
self.handler_unblock(self.id_bt_release_cb)
self.handler_unblock(self.id_enter_notify_cb)
self.handler_unblock(self.id_leave_notify_cb)
self.unset_state_flags(Gtk.StateFlags.INSENSITIVE)
else:
self.handler_block(self.id_bt_release_cb)
self.handler_block(self.id_enter_notify_cb)
self.handler_block(self.id_leave_notify_cb)
self.set_state_flags(self.get_state_flags() |
Gtk.StateFlags.INSENSITIVE,
clear=True)
| lgpl-2.1 | -2,447,443,035,988,233,000 | 35.352941 | 77 | 0.582255 | false |
Vixionar/django | tests/forms_tests/widget_tests/test_selectmultiple.py | 145 | 4951 | from django.forms import SelectMultiple
from .base import WidgetTest
class SelectMultipleTest(WidgetTest):
widget = SelectMultiple()
numeric_choices = (('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('0', 'extra'))
def test_render_selected(self):
self.check_html(self.widget, 'beatles', ['J'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_multiple_selected(self):
self.check_html(self.widget, 'beatles', ['J', 'P'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_none(self):
"""
If the value is None, none of the options are selected.
"""
self.check_html(self.widget, 'beatles', None, choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_value_label(self):
"""
If the value corresponds to a label (but not to an option value), none
of the options are selected.
"""
self.check_html(self.widget, 'beatles', ['John'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_multiple_options_same_value(self):
"""
Multiple options with the same value can be selected (#8103).
"""
self.check_html(self.widget, 'choices', ['0'], choices=self.numeric_choices, html=(
"""<select multiple="multiple" name="choices">
<option value="0" selected="selected">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="0" selected="selected">extra</option>
</select>"""
))
def test_multiple_values_invalid(self):
"""
If multiple values are given, but some of them are not valid, the valid
ones are selected.
"""
self.check_html(self.widget, 'beatles', ['J', 'G', 'foo'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G" selected="selected">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_compare_string(self):
choices = [('1', '1'), ('2', '2'), ('3', '3')]
self.check_html(self.widget, 'nums', [2], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
self.check_html(self.widget, 'nums', ['2'], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
self.check_html(self.widget, 'nums', [2], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
def test_optgroup_select_multiple(self):
widget = SelectMultiple(choices=(
('outer1', 'Outer 1'),
('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))),
))
self.check_html(widget, 'nestchoice', ['outer1', 'inner2'], html=(
"""<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2" selected="selected">Inner 2</option>
</optgroup>
</select>"""
))
| bsd-3-clause | -7,946,617,110,642,536,000 | 38.608 | 95 | 0.531812 | false |
wzbozon/statsmodels | statsmodels/sandbox/km_class.py | 31 | 11748 | #a class for the Kaplan-Meier estimator
from statsmodels.compat.python import range
import numpy as np
from math import sqrt
import matplotlib.pyplot as plt
class KAPLAN_MEIER(object):
def __init__(self, data, timesIn, groupIn, censoringIn):
raise RuntimeError('Newer version of Kaplan-Meier class available in survival2.py')
#store the inputs
self.data = data
self.timesIn = timesIn
self.groupIn = groupIn
self.censoringIn = censoringIn
def fit(self):
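        # Added note: the loop below computes the product-limit (Kaplan-Meier)
        # estimate S(t) = prod_{t_i <= t} (1 - d_i / n_i) and Greenwood's
        # variance term sum_{t_i <= t} d_i / (n_i * (n_i - d_i)), where n_i is
        # the number at risk and d_i the number of events at time t_i; the
        # reported standard error is sqrt(S(t)^2 * varSum).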
#split the data into groups based on the predicting variable
#get a set of all the groups
groups = list(set(self.data[:,self.groupIn]))
#create an empty list to store the data for different groups
groupList = []
#create an empty list for each group and add it to groups
for i in range(len(groups)):
groupList.append([])
#iterate through all the groups in groups
for i in range(len(groups)):
#iterate though the rows of dataArray
for j in range(len(self.data)):
#test if this row has the correct group
if self.data[j,self.groupIn] == groups[i]:
#add the row to groupList
groupList[i].append(self.data[j])
#create an empty list to store the times for each group
timeList = []
#iterate through all the groups
for i in range(len(groupList)):
#create an empty list
times = []
#iterate through all the rows of the group
for j in range(len(groupList[i])):
#get a list of all the times in the group
times.append(groupList[i][j][self.timesIn])
#get a sorted set of the times and store it in timeList
times = list(sorted(set(times)))
timeList.append(times)
#get a list of the number at risk and events at each time
#create an empty list to store the results in
timeCounts = []
#create an empty list to hold points for plotting
points = []
#create a list for points where censoring occurs
censoredPoints = []
        #iterate through each group
for i in range(len(groupList)):
#initialize a variable to estimate the survival function
survival = 1
#initialize a variable to estimate the variance of
#the survival function
varSum = 0
#initialize a counter for the number at risk
riskCounter = len(groupList[i])
#create a list for the counts for this group
counts = []
##create a list for points to plot
x = []
y = []
#iterate through the list of times
for j in range(len(timeList[i])):
if j != 0:
if j == 1:
#add an indicator to tell if the time
#starts a new group
groupInd = 1
#add (0,1) to the list of points
x.append(0)
y.append(1)
#add the point time to the right of that
x.append(timeList[i][j-1])
y.append(1)
#add the point below that at survival
x.append(timeList[i][j-1])
y.append(survival)
#add the survival to y
y.append(survival)
else:
groupInd = 0
#add survival twice to y
y.append(survival)
y.append(survival)
#add the time twice to x
x.append(timeList[i][j-1])
x.append(timeList[i][j-1])
#add each censored time, number of censorings and
#its survival to censoredPoints
censoredPoints.append([timeList[i][j-1],
censoringNum,survival,groupInd])
#add the count to the list
counts.append([timeList[i][j-1],riskCounter,
eventCounter,survival,
sqrt(((survival)**2)*varSum)])
#increment the number at risk
riskCounter += -1*(riskChange)
#initialize a counter for the change in the number at risk
riskChange = 0
#initialize a counter to zero
eventCounter = 0
                #initialize a counter to tell when censoring occurs
censoringCounter = 0
censoringNum = 0
#iterate through the observations in each group
for k in range(len(groupList[i])):
#check of the observation has the given time
if (groupList[i][k][self.timesIn]) == (timeList[i][j]):
#increment the number at risk counter
riskChange += 1
#check if this is an event or censoring
if groupList[i][k][self.censoringIn] == 1:
#add 1 to the counter
eventCounter += 1
else:
censoringNum += 1
#check if there are any events at this time
if eventCounter != censoringCounter:
censoringCounter = eventCounter
#calculate the estimate of the survival function
survival *= ((float(riskCounter) -
eventCounter)/(riskCounter))
try:
#calculate the estimate of the variance
varSum += (eventCounter)/((riskCounter)
*(float(riskCounter)-
eventCounter))
except ZeroDivisionError:
varSum = 0
#append the last row to counts
counts.append([timeList[i][len(timeList[i])-1],
riskCounter,eventCounter,survival,
sqrt(((survival)**2)*varSum)])
#add the last time once to x
x.append(timeList[i][len(timeList[i])-1])
x.append(timeList[i][len(timeList[i])-1])
#add the last survival twice to y
y.append(survival)
#y.append(survival)
censoredPoints.append([timeList[i][len(timeList[i])-1],
censoringNum,survival,1])
            #add the list for the group to a list for all the groups
timeCounts.append(np.array(counts))
points.append([x,y])
        #returns a list of arrays, where each array has as its columns: the time,
#the number at risk, the number of events, the estimated value of the
#survival function at that time, and the estimated standard error at
#that time, in that order
self.results = timeCounts
self.points = points
self.censoredPoints = censoredPoints
def plot(self):
x = []
#iterate through the groups
for i in range(len(self.points)):
#plot x and y
plt.plot(np.array(self.points[i][0]),np.array(self.points[i][1]))
#create lists of all the x and y values
x += self.points[i][0]
for j in range(len(self.censoredPoints)):
            #check if censoring is occurring
if (self.censoredPoints[j][1] != 0):
#if this is the first censored point
if (self.censoredPoints[j][3] == 1) and (j == 0):
#calculate a distance beyond 1 to place it
#so all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the censored points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((1+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this censored point starts a new group
elif ((self.censoredPoints[j][3] == 1) and
(self.censoredPoints[j-1][3] == 1)):
#calculate a distance beyond 1 to place it
#so all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the censored points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((1+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this is the last censored point
elif j == (len(self.censoredPoints) - 1):
#calculate a distance beyond the previous time
#so that all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((self.censoredPoints[j-1][0]+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this is a point in the middle of the group
else:
                #calculate a distance beyond the current time
#to place the point, so they all fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j+1][0])
- self.censoredPoints[j][0]))
#iterate through all the points at this time
for k in range(self.censoredPoints[j][1]):
                    #plot a vertical line for censoring
plt.vlines((self.censoredPoints[j][0]+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#set the size of the plot so it extends to the max x and above 1 for y
plt.xlim((0,np.max(x)))
plt.ylim((0,1.05))
#label the axes
plt.xlabel('time')
plt.ylabel('survival')
plt.show()
def show_results(self):
#start a string that will be a table of the results
resultsString = ''
#iterate through all the groups
for i in range(len(self.results)):
#label the group and header
resultsString += ('Group {0}\n\n'.format(i) +
'Time At Risk Events Survival Std. Err\n')
for j in self.results[i]:
#add the results to the string
resultsString += (
'{0:<9d}{1:<12d}{2:<11d}{3:<13.4f}{4:<6.4f}\n'.format(
int(j[0]),int(j[1]),int(j[2]),j[3],j[4]))
print(resultsString)
| bsd-3-clause | -5,926,520,682,033,970,000 | 47.95 | 91 | 0.490637 | false |
pdellaert/ansible | contrib/vault/azure_vault.py | 37 | 23745 | #!/usr/bin/env python
#
# This script borrows a great deal of code from the azure_rm.py dynamic inventory script
# that is packaged with Ansible. This can be found in the Ansible GitHub project at:
# https://github.com/ansible/ansible/blob/devel/contrib/inventory/azure_rm.py
#
# The Azure Dynamic Inventory script was written by:
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
# Altered/Added for Vault functionality:
# Austin Hobbs, GitHub: @OxHobbs
'''
Ansible Vault Password with Azure Key Vault Secret Script
=========================================================
This script is designed to be used with Ansible Vault. It can be supplied to
the ansible-vault command as the password file and retrieves the vault
password from an Azure Key Vault secret. The script uses the Azure Python SDK.
For instructions on installing the Azure Python SDK see
http://azure-sdk-for-python.readthedocs.org/
Authentication
--------------
The order of precedence is command line arguments, environment variables,
and finally the [default] profile found in ~/.azure/credentials for all
authentication parameters.
If using a credentials file, it should be an ini formatted file with one or
more sections, which we refer to as profiles. The script looks for a
[default] section, if a profile is not specified either on the command line
or with an environment variable. The keys in a profile will match the
list of command line arguments below.
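An illustrative profile (the values below are placeholders, not defaults):
[default]
subscription_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
client_id=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
secret=xxxxxxxxxxxxxxxxxxxxxxxx
tenant=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx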
For command line arguments and environment variables specify a profile found
in your ~/.azure/credentials file, or a service principal or Active Directory
user.
Command line arguments:
- profile
- client_id
- secret
- subscription_id
- tenant
- ad_user
- password
- cloud_environment
- adfs_authority_url
- vault-name
- secret-name
- secret-version
Environment variables:
- AZURE_PROFILE
- AZURE_CLIENT_ID
- AZURE_SECRET
- AZURE_SUBSCRIPTION_ID
- AZURE_TENANT
- AZURE_AD_USER
- AZURE_PASSWORD
- AZURE_CLOUD_ENVIRONMENT
- AZURE_ADFS_AUTHORITY_URL
- AZURE_VAULT_NAME
- AZURE_VAULT_SECRET_NAME
- AZURE_VAULT_SECRET_VERSION
Vault
-----
The order of precedence of Azure Key Vault Secret information is the same.
Command line arguments, environment variables, and finally the azure_vault.ini
file with the [azure_keyvault] section.
azure_vault.ini (or azure_rm.ini if merged with Azure Dynamic Inventory Script)
------------------------------------------------------------------------------
As mentioned above, you can control execution using environment variables or a .ini file. A sample
azure_vault.ini is included. The name of the .ini file is the basename of the inventory script (in this case
'azure_vault') with a .ini extension. It also assumes the .ini file is alongside the script. To specify
a different path for the .ini file, define the AZURE_VAULT_INI_PATH environment variable:
export AZURE_VAULT_INI_PATH=/path/to/custom.ini
or
export AZURE_VAULT_INI_PATH=[same path as azure_rm.ini if merged]
__NOTE__: If using the azure_rm.py dynamic inventory script, it is possible to use the same .ini
file for both the azure_rm dynamic inventory and the azure_vault password file. Simply add a section
named [azure_keyvault] to the ini file with the following properties: vault_name, secret_name and
secret_version.
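For illustration, such a section might look like this (values are placeholders
that mirror the examples below):
[azure_keyvault]
vault_name=mydjangovault
secret_name=vaultpw
secret_version=6b6w7f7252b44eac8ee726b3698009f3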
Examples:
---------
Validate the azure_vault.py script with Python
$ python azure_vault.py -n mydjangovault -s vaultpw -v 6b6w7f7252b44eac8ee726b3698009f3
$ python azure_vault.py --vault-name 'mydjangovault' --secret-name 'vaultpw' \
--secret-version 6b6w7f7252b44eac8ee726b3698009f3
Use with a playbook
$ ansible-playbook -i ./azure_rm.py my_playbook.yml --limit galaxy-qa --vault-password-file ./azure_vault.py
Insecure Platform Warning
-------------------------
If you receive InsecurePlatformWarning from urllib3, install the
requests security packages:
pip install requests[security]
author:
- Chris Houseknecht (@chouseknecht)
- Matt Davis (@nitzmahone)
- Austin Hobbs (@OxHobbs)
Company: Ansible by Red Hat, Microsoft
Version: 0.1.0
'''
import argparse
import os
import re
import sys
import inspect
from azure.keyvault import KeyVaultClient
from ansible.module_utils.six.moves import configparser as cp
from os.path import expanduser
import ansible.module_utils.six.moves.urllib.parse as urlparse
HAS_AZURE = True
HAS_AZURE_EXC = None
HAS_AZURE_CLI_CORE = True
CLIError = None
try:
from msrestazure.azure_active_directory import AADTokenCredentials
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_active_directory import MSIAuthentication
from msrestazure import azure_cloud
from azure.mgmt.compute import __version__ as azure_compute_version
from azure.common import AzureMissingResourceHttpError, AzureHttpError
from azure.common.credentials import ServicePrincipalCredentials, UserPassCredentials
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.resource.resources import ResourceManagementClient
from azure.mgmt.resource.subscriptions import SubscriptionClient
from azure.mgmt.compute import ComputeManagementClient
from adal.authentication_context import AuthenticationContext
except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
try:
from azure.cli.core.util import CLIError
from azure.common.credentials import get_azure_cli_credentials, get_cli_profile
from azure.common.cloud import get_cli_active_cloud
except ImportError:
HAS_AZURE_CLI_CORE = False
CLIError = Exception
try:
from ansible.release import __version__ as ansible_version
except ImportError:
ansible_version = 'unknown'
AZURE_CREDENTIAL_ENV_MAPPING = dict(
profile='AZURE_PROFILE',
subscription_id='AZURE_SUBSCRIPTION_ID',
client_id='AZURE_CLIENT_ID',
secret='AZURE_SECRET',
tenant='AZURE_TENANT',
ad_user='AZURE_AD_USER',
password='AZURE_PASSWORD',
cloud_environment='AZURE_CLOUD_ENVIRONMENT',
adfs_authority_url='AZURE_ADFS_AUTHORITY_URL'
)
AZURE_VAULT_SETTINGS = dict(
vault_name='AZURE_VAULT_NAME',
secret_name='AZURE_VAULT_SECRET_NAME',
secret_version='AZURE_VAULT_SECRET_VERSION',
)
AZURE_MIN_VERSION = "2.0.0"
ANSIBLE_USER_AGENT = 'Ansible/{0}'.format(ansible_version)
class AzureRM(object):
def __init__(self, args):
self._args = args
self._cloud_environment = None
self._compute_client = None
self._resource_client = None
self._network_client = None
self._adfs_authority_url = None
self._vault_client = None
self._resource = None
self.debug = False
if args.debug:
self.debug = True
self.credentials = self._get_credentials(args)
if not self.credentials:
self.fail("Failed to get credentials. Either pass as parameters, set environment variables, "
"or define a profile in ~/.azure/credentials.")
# if cloud_environment specified, look up/build Cloud object
raw_cloud_env = self.credentials.get('cloud_environment')
if not raw_cloud_env:
self._cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD # SDK default
else:
# try to look up "well-known" values via the name attribute on azure_cloud members
all_clouds = [x[1] for x in inspect.getmembers(azure_cloud) if isinstance(x[1], azure_cloud.Cloud)]
matched_clouds = [x for x in all_clouds if x.name == raw_cloud_env]
if len(matched_clouds) == 1:
self._cloud_environment = matched_clouds[0]
elif len(matched_clouds) > 1:
self.fail("Azure SDK failure: more than one cloud matched for cloud_environment name '{0}'".format(
raw_cloud_env))
else:
if not urlparse.urlparse(raw_cloud_env).scheme:
self.fail("cloud_environment must be an endpoint discovery URL or one of {0}".format(
[x.name for x in all_clouds]))
try:
self._cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(raw_cloud_env)
except Exception as e:
self.fail("cloud_environment {0} could not be resolved: {1}".format(raw_cloud_env, e.message))
if self.credentials.get('subscription_id', None) is None:
self.fail("Credentials did not include a subscription_id value.")
self.log("setting subscription_id")
self.subscription_id = self.credentials['subscription_id']
# get authentication authority
# for adfs, user could pass in authority or not.
# for others, use default authority from cloud environment
if self.credentials.get('adfs_authority_url'):
self._adfs_authority_url = self.credentials.get('adfs_authority_url')
else:
self._adfs_authority_url = self._cloud_environment.endpoints.active_directory
# get resource from cloud environment
self._resource = self._cloud_environment.endpoints.active_directory_resource_id
if self.credentials.get('credentials'):
self.azure_credentials = self.credentials.get('credentials')
elif self.credentials.get('client_id') and self.credentials.get('secret') and self.credentials.get('tenant'):
self.azure_credentials = ServicePrincipalCredentials(client_id=self.credentials['client_id'],
secret=self.credentials['secret'],
tenant=self.credentials['tenant'],
cloud_environment=self._cloud_environment)
elif self.credentials.get('ad_user') is not None and \
self.credentials.get('password') is not None and \
self.credentials.get('client_id') is not None and \
self.credentials.get('tenant') is not None:
self.azure_credentials = self.acquire_token_with_username_password(
self._adfs_authority_url,
self._resource,
self.credentials['ad_user'],
self.credentials['password'],
self.credentials['client_id'],
self.credentials['tenant'])
elif self.credentials.get('ad_user') is not None and self.credentials.get('password') is not None:
tenant = self.credentials.get('tenant')
if not tenant:
tenant = 'common'
self.azure_credentials = UserPassCredentials(self.credentials['ad_user'],
self.credentials['password'],
tenant=tenant,
cloud_environment=self._cloud_environment)
else:
self.fail("Failed to authenticate with provided credentials. Some attributes were missing. "
"Credentials must include client_id, secret and tenant or ad_user and password, or "
"ad_user, password, client_id, tenant and adfs_authority_url(optional) for ADFS authentication, "
"or be logged in using AzureCLI.")
def log(self, msg):
if self.debug:
print(msg + u'\n')
def fail(self, msg):
raise Exception(msg)
def _get_profile(self, profile="default"):
path = expanduser("~")
path += "/.azure/credentials"
try:
config = cp.ConfigParser()
config.read(path)
except Exception as exc:
self.fail("Failed to access {0}. Check that the file exists and you have read "
"access. {1}".format(path, str(exc)))
credentials = dict()
for key in AZURE_CREDENTIAL_ENV_MAPPING:
try:
credentials[key] = config.get(profile, key, raw=True)
except Exception:
pass
if credentials.get('client_id') is not None or credentials.get('ad_user') is not None:
return credentials
return None
def _get_env_credentials(self):
env_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
env_credentials[attribute] = os.environ.get(env_variable, None)
if env_credentials['profile'] is not None:
credentials = self._get_profile(env_credentials['profile'])
return credentials
if env_credentials['client_id'] is not None or env_credentials['ad_user'] is not None:
return env_credentials
return None
def _get_azure_cli_credentials(self):
credentials, subscription_id = get_azure_cli_credentials()
cloud_environment = get_cli_active_cloud()
cli_credentials = {
'credentials': credentials,
'subscription_id': subscription_id,
'cloud_environment': cloud_environment
}
return cli_credentials
def _get_msi_credentials(self, subscription_id_param=None):
credentials = MSIAuthentication()
try:
# try to get the subscription in MSI to test whether MSI is enabled
subscription_client = SubscriptionClient(credentials)
subscription = next(subscription_client.subscriptions.list())
subscription_id = str(subscription.subscription_id)
return {
'credentials': credentials,
'subscription_id': subscription_id_param or subscription_id
}
except Exception as exc:
return None
def _get_credentials(self, params):
# Get authentication credentials.
# Precedence: cmd line parameters-> environment variables-> default profile in ~/.azure/credentials.
self.log('Getting credentials')
arg_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
arg_credentials[attribute] = getattr(params, attribute)
# try module params
if arg_credentials['profile'] is not None:
self.log('Retrieving credentials with profile parameter.')
credentials = self._get_profile(arg_credentials['profile'])
return credentials
if arg_credentials['client_id'] is not None:
self.log('Received credentials from parameters.')
return arg_credentials
if arg_credentials['ad_user'] is not None:
self.log('Received credentials from parameters.')
return arg_credentials
# try environment
env_credentials = self._get_env_credentials()
if env_credentials:
self.log('Received credentials from env.')
return env_credentials
# try default profile from ~./azure/credentials
default_credentials = self._get_profile()
if default_credentials:
self.log('Retrieved default profile credentials from ~/.azure/credentials.')
return default_credentials
msi_credentials = self._get_msi_credentials(arg_credentials.get('subscription_id'))
if msi_credentials:
self.log('Retrieved credentials from MSI.')
return msi_credentials
try:
if HAS_AZURE_CLI_CORE:
self.log('Retrieving credentials from AzureCLI profile')
cli_credentials = self._get_azure_cli_credentials()
return cli_credentials
except CLIError as ce:
self.log('Error getting AzureCLI profile credentials - {0}'.format(ce))
return None
def acquire_token_with_username_password(self, authority, resource, username, password, client_id, tenant):
authority_uri = authority
if tenant is not None:
authority_uri = authority + '/' + tenant
context = AuthenticationContext(authority_uri)
token_response = context.acquire_token_with_username_password(resource, username, password, client_id)
return AADTokenCredentials(token_response)
def _register(self, key):
try:
# We have to perform the one-time registration here. Otherwise, we receive an error the first
# time we attempt to use the requested client.
resource_client = self.rm_client
resource_client.providers.register(key)
except Exception as exc:
self.log("One-time registration of {0} failed - {1}".format(key, str(exc)))
self.log("You might need to register {0} using an admin account".format(key))
self.log(("To register a provider using the Python CLI: "
"https://docs.microsoft.com/azure/azure-resource-manager/"
"resource-manager-common-deployment-errors#noregisteredproviderfound"))
def get_mgmt_svc_client(self, client_type, base_url, api_version):
client = client_type(self.azure_credentials,
self.subscription_id,
base_url=base_url,
api_version=api_version)
client.config.add_user_agent(ANSIBLE_USER_AGENT)
return client
def get_vault_client(self):
return KeyVaultClient(self.azure_credentials)
def get_vault_suffix(self):
return self._cloud_environment.suffixes.keyvault_dns
@property
def network_client(self):
self.log('Getting network client')
if not self._network_client:
self._network_client = self.get_mgmt_svc_client(NetworkManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-06-01')
self._register('Microsoft.Network')
return self._network_client
@property
def rm_client(self):
self.log('Getting resource manager client')
if not self._resource_client:
self._resource_client = self.get_mgmt_svc_client(ResourceManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-05-10')
return self._resource_client
@property
def compute_client(self):
self.log('Getting compute client')
if not self._compute_client:
self._compute_client = self.get_mgmt_svc_client(ComputeManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-03-30')
self._register('Microsoft.Compute')
return self._compute_client
@property
def vault_client(self):
self.log('Getting the Key Vault client')
if not self._vault_client:
self._vault_client = self.get_vault_client()
return self._vault_client
class AzureKeyVaultSecret:
def __init__(self):
self._args = self._parse_cli_args()
try:
rm = AzureRM(self._args)
except Exception as e:
sys.exit("{0}".format(str(e)))
self._get_vault_settings()
if self._args.vault_name:
self.vault_name = self._args.vault_name
if self._args.secret_name:
self.secret_name = self._args.secret_name
if self._args.secret_version:
self.secret_version = self._args.secret_version
self._vault_suffix = rm.get_vault_suffix()
self._vault_client = rm.vault_client
print(self.get_password_from_vault())
def _parse_cli_args(self):
parser = argparse.ArgumentParser(
            description='Obtain the vault password used to secure your Ansible secrets'
)
parser.add_argument('-n', '--vault-name', action='store', help='Name of Azure Key Vault')
parser.add_argument('-s', '--secret-name', action='store',
help='Name of the secret stored in Azure Key Vault')
parser.add_argument('-v', '--secret-version', action='store',
help='Version of the secret to be retrieved')
parser.add_argument('--debug', action='store_true', default=False,
help='Send the debug messages to STDOUT')
parser.add_argument('--profile', action='store',
help='Azure profile contained in ~/.azure/credentials')
parser.add_argument('--subscription_id', action='store',
help='Azure Subscription Id')
parser.add_argument('--client_id', action='store',
help='Azure Client Id ')
parser.add_argument('--secret', action='store',
help='Azure Client Secret')
parser.add_argument('--tenant', action='store',
help='Azure Tenant Id')
parser.add_argument('--ad_user', action='store',
help='Active Directory User')
parser.add_argument('--password', action='store',
help='password')
parser.add_argument('--adfs_authority_url', action='store',
help='Azure ADFS authority url')
parser.add_argument('--cloud_environment', action='store',
help='Azure Cloud Environment name or metadata discovery URL')
return parser.parse_args()
def get_password_from_vault(self):
vault_url = 'https://{0}{1}'.format(self.vault_name, self._vault_suffix)
secret = self._vault_client.get_secret(vault_url, self.secret_name, self.secret_version)
return secret.value
def _get_vault_settings(self):
env_settings = self._get_vault_env_settings()
if None not in set(env_settings.values()):
for key in AZURE_VAULT_SETTINGS:
setattr(self, key, env_settings.get(key, None))
else:
file_settings = self._load_vault_settings()
if not file_settings:
return
for key in AZURE_VAULT_SETTINGS:
if file_settings.get(key):
setattr(self, key, file_settings.get(key))
def _get_vault_env_settings(self):
env_settings = dict()
for attribute, env_variable in AZURE_VAULT_SETTINGS.items():
env_settings[attribute] = os.environ.get(env_variable, None)
return env_settings
def _load_vault_settings(self):
basename = os.path.splitext(os.path.basename(__file__))[0]
default_path = os.path.join(os.path.dirname(__file__), (basename + '.ini'))
path = os.path.expanduser(os.path.expandvars(os.environ.get('AZURE_VAULT_INI_PATH', default_path)))
config = None
settings = None
try:
config = cp.ConfigParser()
config.read(path)
except Exception:
pass
if config is not None:
settings = dict()
for key in AZURE_VAULT_SETTINGS:
try:
settings[key] = config.get('azure_keyvault', key, raw=True)
except Exception:
pass
return settings
def main():
if not HAS_AZURE:
sys.exit("The Azure python sdk is not installed (try `pip install 'azure>={0}' --upgrade`) - {1}".format(
AZURE_MIN_VERSION, HAS_AZURE_EXC))
AzureKeyVaultSecret()
if __name__ == '__main__':
main()
| gpl-3.0 | 2,316,225,355,410,499,600 | 39.109797 | 119 | 0.620594 | false |
ypu/tp-qemu | qemu/tests/timerdevice_tscwrite.py | 3 | 1850 | import logging
from autotest.client.shared import error
from autotest.client import utils
@error.context_aware
def run(test, params, env):
"""
Timer device tscwrite test:
1) Check for an appropriate clocksource on host.
2) Boot the guest.
3) Download and compile the newest msr-tools.
4) Execute cmd in guest.
:param test: QEMU test object.
:param params: Dictionary with test parameters.
:param env: Dictionary with the test environment.
"""
error.context("Check for an appropriate clocksource on host", logging.info)
host_cmd = "cat /sys/devices/system/clocksource/"
host_cmd += "clocksource0/current_clocksource"
if not "tsc" in utils.system_output(host_cmd):
raise error.TestNAError("Host must use 'tsc' clocksource")
error.context("Boot the guest", logging.info)
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
error.context("Download and compile the newest msr-tools", logging.info)
msr_tools_install_cmd = params["msr_tools_install_cmd"]
session.cmd(msr_tools_install_cmd)
error.context("Execute cmd in guest", logging.info)
cmd = "dmesg -c > /dev/null"
session.cmd(cmd)
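    # With a usable TSC clocksource the guest's date command is expected to get the
    # time through the vDSO, so strace should see no clock_gettime/gettimeofday calls.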
date_cmd = "strace date 2>&1 | egrep 'clock_gettime|gettimeofday' | wc -l"
output = session.cmd(date_cmd)
    if '0' not in output:
        raise error.TestFail("Test failed before running msr-tools."
                             " Output: '%s'" % output)
msr_tools_cmd = params["msr_tools_cmd"]
session.cmd(msr_tools_cmd)
cmd = "dmesg"
session.cmd(cmd)
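    # Writing the TSC through msr-tools should knock the guest off the vDSO fast
    # path, so the traced syscall count is expected to become non-zero.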
output = session.cmd(date_cmd)
if not "1" in output:
raise error.TestFail("Test failed after run msr tools."
" Output: '%s'" % output)
| gpl-2.0 | 4,940,656,101,771,483,000 | 32.035714 | 79 | 0.651351 | false |
timcera/hspfbintoolbox | tests/test_catalog.py | 1 | 115314 | # -*- coding: utf-8 -*-
"""
catalog
----------------------------------
Tests for `hspfbintoolbox` module.
"""
import csv
import shlex
import subprocess
import sys
from unittest import TestCase
from pandas.testing import assert_frame_equal
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import pandas as pd
from hspfbintoolbox import hspfbintoolbox
interval2codemap = {"yearly": 5, "monthly": 4, "daily": 3, "bivl": 2}
def capture(func, *args, **kwds):
sys.stdout = StringIO() # capture output
out = func(*args, **kwds)
out = sys.stdout.getvalue() # release output
try:
out = bytes(out, "utf-8")
    except TypeError:
pass
return out
def read_unicode_csv(
filename,
delimiter=",",
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
lineterminator="\n",
encoding="utf-8",
):
# Python 3 version
if sys.version_info[0] >= 3:
# Open the file in text mode with given encoding
# Set newline arg to ''
# (see https://docs.python.org/3/library/csv.html)
# Next, get the csv reader, with unicode delimiter and quotechar
csv_reader = csv.reader(
filename,
delimiter=delimiter,
quotechar=quotechar,
quoting=quoting,
lineterminator=lineterminator,
)
# Now, iterate over the (already decoded) csv_reader generator
for row in csv_reader:
yield row
# Python 2 version
else:
# Next, get the csv reader, passing delimiter and quotechar as
# bytestrings rather than unicode
csv_reader = csv.reader(
filename,
delimiter=delimiter.encode(encoding),
quotechar=quotechar.encode(encoding),
quoting=quoting,
lineterminator=lineterminator,
)
# Iterate over the file and decode each string into unicode
for row in csv_reader:
yield [cell.decode(encoding) for cell in row]
class TestDescribe(TestCase):
def setUp(self):
self.catalog = b"""\
LUE , LC,GROUP ,VAR , TC,START ,END ,TC
IMPLND, 11,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZS , 5,1951 ,2001 ,yearly
"""
        ndict = []
        rd = read_unicode_csv(StringIO(self.catalog.decode()))
        next(rd)
        for row in rd:
            if len(row) == 0:
                continue
            nrow = [i.strip() for i in row]
            ndict.append(
                (nrow[0], int(nrow[1]), nrow[2], nrow[3], interval2codemap[nrow[7]])
            )
        self.ncatalog = sorted(ndict)
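        # Added note (not in the original module): each entry in self.ncatalog
        # is a tuple of (operation, id, activity group, variable name,
        # interval code), e.g. ('PERLND', 905, 'PWATER', 'UZS', 5) -- in the
        # listing above the 'yearly' interval corresponds to code 5.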

    def test_catalog_api(self):
        out = hspfbintoolbox.catalog("tests/6b_np1.hbn")
        out = [i[:5] for i in out]
        self.assertEqual(out, self.ncatalog)
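        # Added note (not in the original module): the [:5] slice keeps only
        # the five fields stored in self.ncatalog; the extra fields returned by
        # catalog() (the listing above suggests start/end years such as
        # 1951/2001) are not compared here.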

    def test_catalog_cli(self):
        args = "hspfbintoolbox catalog --tablefmt csv tests/6b_np1.hbn"
        args = shlex.split(args)
        out = subprocess.Popen(
            args, stdout=subprocess.PIPE, stdin=subprocess.PIPE
        ).communicate()[0]
        self.assertEqual(out, self.catalog)
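
    # Added note (not in the original module): test_catalog_cli shells out to
    # the installed `hspfbintoolbox` command line, so the package has to be
    # installed (for example with `pip install -e .`) before this test can pass.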
| bsd-3-clause | 8,752,432,432,512,541,000 | 49.821507 | 84 | 0.647059 | false |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/process/users/manual.py | 4 | 9675 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

# this class is known to contain cruft and will be looked at later, so
# no current implementation utilizes it aside from scripts.runner.

from twisted.python import log
from twisted.internet import defer
from twisted.application import service

from buildbot import pbutil


class UsersBase(service.MultiService):
    """
    Base class for services that manage users manually. This takes care
    of the service.MultiService work needed by all the services that
    subclass it.
    """

    def __init__(self):
        service.MultiService.__init__(self)
        self.master = None

    def startService(self):
        service.MultiService.startService(self)

    def stopService(self):
        return service.MultiService.stopService(self)


class CommandlineUserManagerPerspective(pbutil.NewCredPerspective):
    """
    Perspective registered in buildbot.pbmanager and contains the real
    workings of `buildbot user` by working with the database when
    perspective_commandline is called.
    """

    def __init__(self, master):
        self.master = master

    def formatResults(self, op, results):
        """
        This formats the results of the database operations for printing
        back to the caller

        @param op: operation to perform (add, remove, update, get)
        @type op: string

        @param results: results from db queries in perspective_commandline
        @type results: list

        @returns: string containing formatted results
        """
        formatted_results = ""
        if op == 'add':
            # list, alternating ident, uid
            formatted_results += "user(s) added:\n"
            for user in results:
                if isinstance(user, basestring):
                    formatted_results += "identifier: %s\n" % user
                else:
                    formatted_results += "uid: %d\n\n" % user
        elif op == 'remove':
            # list of dictionaries
            formatted_results += "user(s) removed:\n"
            for user in results:
                if user:
                    formatted_results += "identifier: %s\n" % (user)
        elif op == 'update':
            # list, alternating ident, None
            formatted_results += "user(s) updated:\n"
            for user in results:
                if user:
                    formatted_results += "identifier: %s\n" % (user)
        elif op == 'get':
            # list of dictionaries
            formatted_results += "user(s) found:\n"
            for user in results:
                if user:
                    for key in user:
                        if key != 'bb_password':
                            formatted_results += "%s: %s\n" % (key, user[key])
                    formatted_results += "\n"
                else:
                    formatted_results += "no match found\n"
        return formatted_results
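
    # Illustrative example (added comment, not part of the original source):
    # formatResults('get', [{'identifier': 'jdoe', 'bb_username': 'jdoe'}, None])
    # returns roughly (dict key order may vary):
    #   user(s) found:
    #   identifier: jdoe
    #   bb_username: jdoe
    #
    #   no match found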

    @defer.inlineCallbacks
    def perspective_commandline(self, op, bb_username, bb_password, ids, info):
        """
        This performs the requested operations from the `buildbot user`
        call by calling the proper buildbot.db.users methods based on
        the operation. It yields a deferred instance with the results
        from the database methods.

        @param op: operation to perform (add, remove, update, get)
        @type op: string

        @param bb_username: username portion of auth credentials
        @type bb_username: string

        @param bb_password: hashed password portion of auth credentials
        @type bb_password: hashed string

        @param ids: user identifiers used to find existing users
        @type ids: list of strings or None

        @param info: type/value pairs for each user that will be added
                     or updated in the database
        @type info: list of dictionaries or None

        @returns: results from db.users methods via deferred
        """
        log.msg("perspective_commandline called")
        results = []

        if ids:
            for user in ids:
                # get identifier, guaranteed to be in user from checks
                # done in C{scripts.runner}
                uid = yield self.master.db.users.identifierToUid(
                    identifier=user)

                result = None
                if op == 'remove':
                    if uid:
                        yield self.master.db.users.removeUser(uid)
                        result = user
                    else:
                        log.msg("Unable to find uid for identifier %s" % user)
                elif op == 'get':
                    if uid:
                        result = yield self.master.db.users.getUser(uid)
                    else:
                        log.msg("Unable to find uid for identifier %s" % user)
                results.append(result)
        else:
            for user in info:
                # get identifier, guaranteed to be in user from checks
                # done in C{scripts.runner}
                ident = user.pop('identifier')
                uid = yield self.master.db.users.identifierToUid(
                    identifier=ident)

                # if only an identifier was in user, we're updating only
                # the bb_username and bb_password.
                if not user:
                    if uid:
                        result = yield self.master.db.users.updateUser(
                            uid=uid,
                            identifier=ident,
                            bb_username=bb_username,
                            bb_password=bb_password)
                        results.append(ident)
                    else:
                        log.msg("Unable to find uid for identifier %s"
                                % user)
                else:
                    # when adding, we update the user after the first attr
                    once_through = False
                    for attr in user:
                        if op == 'update' or once_through:
                            if uid:
                                result = yield self.master.db.users.updateUser(
                                    uid=uid,
                                    identifier=ident,
                                    bb_username=bb_username,
                                    bb_password=bb_password,
                                    attr_type=attr,
                                    attr_data=user[attr])
                            else:
                                log.msg("Unable to find uid for identifier %s"
                                        % user)
                        elif op == 'add':
                            result = yield self.master.db.users.findUserByAttr(
                                identifier=ident,
                                attr_type=attr,
                                attr_data=user[attr])
                            once_through = True
                    results.append(ident)
                    # result is None from updateUser calls
                    if result:
                        results.append(result)
                        uid = result
        results = self.formatResults(op, results)
        defer.returnValue(results)
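
    # Illustrative sketch (added comment, not part of the original source): the
    # `buildbot user` command reaches this method over a PB connection, roughly
    #   remote.callRemote("commandline", "get", None, None, ["jdoe"], None)
    # Twisted's perspective broker maps the remote name onto the
    # "perspective_" prefixed method, with the argument order shown above.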


class CommandlineUserManager(UsersBase):
    """
    Service that runs to set up and register CommandlineUserManagerPerspective
    so `buildbot user` calls get to perspective_commandline.
    """

    def __init__(self, username=None, passwd=None, port=None):
        UsersBase.__init__(self)
        assert username and passwd, ("A username and password pair must be given "
                                     "to connect and use `buildbot user`")
        self.username = username
        self.passwd = passwd
        assert port, "A port must be specified for a PB connection"
        self.port = port
        self.registration = None

    def startService(self):
        UsersBase.startService(self)

        # set up factory and register with buildbot.pbmanager
        def factory(mind, username):
            return CommandlineUserManagerPerspective(self.master)
        self.registration = self.master.pbmanager.register(self.port,
                                                           self.username,
                                                           self.passwd,
                                                           factory)

    def stopService(self):
        d = defer.maybeDeferred(UsersBase.stopService, self)

        def unreg(_):
            if self.registration:
                return self.registration.unregister()
        d.addCallback(unreg)
        return d
| gpl-2.0 | -1,914,069,743,502,281,700 | 40.346154 | 82 | 0.514935 | false |
jaap-karssenberg/zim-desktop-wiki | zim/gui/templateeditordialog.py | 1 | 6069 |
# Copyright 2012 Jaap Karssenberg <[email protected]>
from gi.repository import Gtk
from zim.newfs import LocalFile
from zim.newfs.helpers import TrashHelper, TrashNotSupportedError
from zim.config import XDG_DATA_HOME, data_file
from zim.templates import list_template_categories, list_templates
from zim.gui.widgets import Dialog, BrowserTreeView, ScrolledWindow
from zim.gui.applications import open_folder_prompt_create, open_file, edit_file
class TemplateEditorDialog(Dialog):
'''Dialog with a tree of available templates for export and new pages.
Allows edit, delete, and create new templates. Uses external editor.
'''
def __init__(self, parent):
Dialog.__init__(self, parent,
_('Templates'), help='Help:Templates', buttons=Gtk.ButtonsType.CLOSE,
defaultwindowsize=(400, 450))
# T: Dialog title
label = Gtk.Label()
label.set_markup('<b>' + _('Templates') + '</b>')
# T: Section in dialog
label.set_alignment(0.0, 0.5)
self.vbox.pack_start(label, False, True, 0)
hbox = Gtk.HBox()
self.vbox.pack_start(hbox, True, True, 0)
self.view = TemplateListView()
self.view.connect('row-activated', self.on_selection_changed)
hbox.pack_start(ScrolledWindow(self.view), True, True, 0)
vbbox = Gtk.VButtonBox()
vbbox.set_layout(Gtk.ButtonBoxStyle.START)
hbox.pack_start(vbbox, False, True, 0)
view_button = Gtk.Button.new_with_mnemonic(_('_View')) # T: button label
view_button.connect('clicked', self.on_view)
copy_button = Gtk.Button.new_with_mnemonic(_('_Copy')) # T: Button label
copy_button.connect('clicked', self.on_copy)
edit_button = Gtk.Button.new_with_mnemonic(_('_Edit')) # T: Button label
edit_button.connect('clicked', self.on_edit)
delete_button = Gtk.Button.new_with_mnemonic(_('_Remove')) # T: Button label
delete_button.connect('clicked', self.on_delete)
for b in (view_button, copy_button, edit_button, delete_button):
b.set_alignment(0.0, 0.5)
vbbox.add(b)
browse_button = Gtk.Button.new_with_mnemonic(_('Browse')) # T: button label
browse_button.connect('clicked', self.on_browse)
self.add_extra_button(browse_button)
self._buttonbox = vbbox
self._delete_button = delete_button
self.on_selection_changed()
## Same button appears in export dialog
url_button = Gtk.LinkButton(
'https://zim-wiki.org/more_templates.html',
_('Get more templates online') # T: label for button with URL
)
self.vbox.pack_start(url_button, False, True, 0)
def on_selection_changed(self, *a):
# Set sensitivity of the buttons
# All insensitive if category (folder) is selected
# Delete insensitive if only a default
custom, default = self.view.get_selected()
for button in self._buttonbox.get_children():
button.set_sensitive(custom is not None)
if custom is None:
return
if not custom.exists():
self._delete_button.set_sensitive(False)
def on_view(self, *a):
# Open the file, without waiting for editor to return
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
open_file(self, custom)
else:
assert default and default.exists()
open_file(self, default)
def on_copy(self, *a):
# Create a new template in this category
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
source = custom
else:
assert default and default.exists()
source = default
name = PromptNameDialog(self).run()
assert name is not None
_, ext = custom.basename.rsplit('.', 1)
basename = name + '.' + ext
newfile = custom.dir.file(basename)
source.copyto(newfile)
self.view.refresh()
def on_edit(self, *a):
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if not custom.exists():
# Copy default
default.copyto(custom)
edit_file(self, custom, istextfile=True)
self.view.refresh()
def on_delete(self, *a):
# Only delete custom, may result in reset to default
custom, default = self.view.get_selected()
if custom is None or not custom.exists():
return # Should not have been sensitive
try:
TrashHelper().trash(LocalFile(custom.path))
except TrashNotSupportedError:
# TODO warnings
custom.remove()
self.view.refresh()
def on_browse(self, *a):
dir = XDG_DATA_HOME.subdir(('zim', 'templates'))
open_folder_prompt_create(self, dir)
class PromptNameDialog(Dialog):
def __init__(self, parent):
Dialog.__init__(self, parent, _('Copy Template')) # T: Dialog title
self.add_form([
('name', 'string', _('Name')),
# T: Input label for the new name when copying a template
])
def do_response_ok(self):
self.result = self.form['name']
if self.result:
return True
class TemplateListView(BrowserTreeView):
BASENAME_COL = 0
FILE_COL = 1
DEFAULT_COL = 2
def __init__(self):
BrowserTreeView.__init__(self)
model = Gtk.TreeStore(str, object, object)
# BASENAME_COL, FILE_COL, DEFAULT_COL
self.set_model(model)
self.set_headers_visible(False)
cell_renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn('_template_', cell_renderer, text=self.BASENAME_COL)
self.append_column(column)
self.refresh()
def get_selected(self):
# Returns (base, default file) or (None, None)
model, iter = self.get_selection().get_selected()
if model is None or iter is None:
return None, None
else:
return model[iter][self.FILE_COL], model[iter][self.DEFAULT_COL]
def select(self, path):
self.get_selection().select_path(path)
def refresh(self):
model = self.get_model()
model.clear()
for category in list_template_categories():
parent = model.append(None, (category, None, None))
for name, basename in list_templates(category):
base = XDG_DATA_HOME.file(('zim', 'templates', category, basename))
default = data_file(('templates', category, basename)) # None if not existing
#~ print('>>>', name, base, default)
model.append(parent, (name, base, default))
self.expand_all()
| gpl-2.0 | 7,698,457,529,109,229,000 | 28.038278 | 82 | 0.697644 | false |
joeythesaint/yocto-autobuilder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/test/test_dirdbm.py | 41 | 5859 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for dirdbm module.
"""
import os, shutil, glob
from twisted.trial import unittest
from twisted.persisted import dirdbm
class DirDbmTestCase(unittest.TestCase):
def setUp(self):
self.path = self.mktemp()
self.dbm = dirdbm.open(self.path)
self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'))
def testAll(self):
k = "//==".decode("base64")
self.dbm[k] = "a"
self.dbm[k] = "a"
self.assertEqual(self.dbm[k], "a")
def testRebuildInteraction(self):
from twisted.persisted import dirdbm
from twisted.python import rebuild
s = dirdbm.Shelf('dirdbm.rebuild.test')
s['key'] = 'value'
rebuild.rebuild(dirdbm)
# print s['key']
def testDbm(self):
d = self.dbm
# insert keys
keys = []
values = set()
for k, v in self.items:
d[k] = v
keys.append(k)
values.add(v)
keys.sort()
# check they exist
for k, v in self.items:
assert d.has_key(k), "has_key() failed"
assert d[k] == v, "database has wrong value"
# check non existent key
try:
d["XXX"]
except KeyError:
pass
else:
assert 0, "didn't raise KeyError on non-existent key"
# check keys(), values() and items()
dbkeys = list(d.keys())
dbvalues = set(d.values())
dbitems = set(d.items())
dbkeys.sort()
items = set(self.items)
assert keys == dbkeys, ".keys() output didn't match: %s != %s" % (repr(keys), repr(dbkeys))
assert values == dbvalues, ".values() output didn't match: %s != %s" % (repr(values), repr(dbvalues))
assert items == dbitems, "items() didn't match: %s != %s" % (repr(items), repr(dbitems))
copyPath = self.mktemp()
d2 = d.copyTo(copyPath)
copykeys = list(d.keys())
copyvalues = set(d.values())
copyitems = set(d.items())
copykeys.sort()
assert dbkeys == copykeys, ".copyTo().keys() didn't match: %s != %s" % (repr(dbkeys), repr(copykeys))
assert dbvalues == copyvalues, ".copyTo().values() didn't match: %s != %s" % (repr(dbvalues), repr(copyvalues))
assert dbitems == copyitems, ".copyTo().items() didn't match: %s != %s" % (repr(dbkeys), repr(copyitems))
d2.clear()
assert len(d2.keys()) == len(d2.values()) == len(d2.items()) == 0, ".clear() failed"
shutil.rmtree(copyPath)
# delete items
for k, v in self.items:
del d[k]
assert not d.has_key(k), "has_key() even though we deleted it"
assert len(d.keys()) == 0, "database has keys"
assert len(d.values()) == 0, "database has values"
assert len(d.items()) == 0, "database has items"
def testModificationTime(self):
import time
# the mtime value for files comes from a different place than the
# gettimeofday() system call. On linux, gettimeofday() can be
# slightly ahead (due to clock drift which gettimeofday() takes into
# account but which open()/write()/close() do not), and if we are
# close to the edge of the next second, time.time() can give a value
# which is larger than the mtime which results from a subsequent
# write(). I consider this a kernel bug, but it is beyond the scope
# of this test. Thus we keep the range of acceptability to 3 seconds time.
# -warner
self.dbm["k"] = "v"
self.assert_(abs(time.time() - self.dbm.getModificationTime("k")) <= 3)
def testRecovery(self):
"""DirDBM: test recovery from directory after a faked crash"""
k = self.dbm._encode("key1")
f = open(os.path.join(self.path, k + ".rpl"), "wb")
f.write("value")
f.close()
k2 = self.dbm._encode("key2")
f = open(os.path.join(self.path, k2), "wb")
f.write("correct")
f.close()
f = open(os.path.join(self.path, k2 + ".rpl"), "wb")
f.write("wrong")
f.close()
f = open(os.path.join(self.path, "aa.new"), "wb")
f.write("deleted")
f.close()
dbm = dirdbm.DirDBM(self.path)
assert dbm["key1"] == "value"
assert dbm["key2"] == "correct"
assert not glob.glob(os.path.join(self.path, "*.new"))
assert not glob.glob(os.path.join(self.path, "*.rpl"))
def test_nonStringKeys(self):
"""
L{dirdbm.DirDBM} operations only support string keys: other types
should raise a C{AssertionError}. This really ought to be a
C{TypeError}, but it'll stay like this for backward compatibility.
"""
self.assertRaises(AssertionError, self.dbm.__setitem__, 2, "3")
try:
self.assertRaises(AssertionError, self.dbm.__setitem__, "2", 3)
except unittest.FailTest:
# dirdbm.Shelf.__setitem__ supports non-string values
self.assertIsInstance(self.dbm, dirdbm.Shelf)
self.assertRaises(AssertionError, self.dbm.__getitem__, 2)
self.assertRaises(AssertionError, self.dbm.__delitem__, 2)
self.assertRaises(AssertionError, self.dbm.has_key, 2)
self.assertRaises(AssertionError, self.dbm.__contains__, 2)
self.assertRaises(AssertionError, self.dbm.getModificationTime, 2)
class ShelfTestCase(DirDbmTestCase):
def setUp(self):
self.path = self.mktemp()
self.dbm = dirdbm.Shelf(self.path)
self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'),
('int', 12), ('float', 12.0), ('tuple', (None, 12)))
testCases = [DirDbmTestCase, ShelfTestCase]
| gpl-2.0 | -1,985,087,264,942,400,000 | 33.464706 | 119 | 0.569551 | false |
colinnewell/odoo | addons/website/tests/test_crawl.py | 251 | 3415 | # -*- coding: utf-8 -*-
import logging
import urlparse
import time
import lxml.html
import openerp
import re
_logger = logging.getLogger(__name__)
class Crawler(openerp.tests.HttpCase):
""" Test suite crawling an openerp CMS instance and checking that all
internal links lead to a 200 response.
If a username and a password are provided, authenticates the user before
starting the crawl
"""
at_install = False
post_install = True
def crawl(self, url, seen=None, msg=''):
if seen == None:
seen = set()
url_slug = re.sub(r"[/](([^/=?&]+-)?[0-9]+)([/]|$)", '/<slug>/', url)
url_slug = re.sub(r"([^/=?&]+)=[^/=?&]+", '\g<1>=param', url_slug)
if url_slug in seen:
return seen
else:
seen.add(url_slug)
_logger.info("%s %s", msg, url)
r = self.url_open(url)
code = r.getcode()
self.assertIn( code, xrange(200, 300), "%s Fetching %s returned error response (%d)" % (msg, url, code))
if r.info().gettype() == 'text/html':
doc = lxml.html.fromstring(r.read())
for link in doc.xpath('//a[@href]'):
href = link.get('href')
parts = urlparse.urlsplit(href)
# href with any fragment removed
href = urlparse.urlunsplit((
parts.scheme,
parts.netloc,
parts.path,
parts.query,
''
))
# FIXME: handle relative link (not parts.path.startswith /)
if parts.netloc or \
not parts.path.startswith('/') or \
parts.path == '/web' or\
parts.path.startswith('/web/') or \
parts.path.startswith('/en_US/') or \
(parts.scheme and parts.scheme not in ('http', 'https')):
continue
self.crawl(href, seen, msg)
return seen
def test_10_crawl_public(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
seen = self.crawl('/', msg='Anonymous Coward')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "public crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request, ", count, duration, sql, duration/count, float(sql)/count)
def test_20_crawl_demo(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('demo', 'demo')
seen = self.crawl('/', msg='demo')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "demo crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
def test_30_crawl_admin(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('admin', 'admin')
seen = self.crawl('/', msg='admin')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "admin crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
| agpl-3.0 | 5,025,868,008,489,267,000 | 34.572917 | 152 | 0.534407 | false |
drduh/config | lighttpd/upload.py | 1 | 1501 | #!/usr/bin/env python3
# https://github.com/drduh/config/blob/master/lighttpd/upload.py
# Simple file uploader
# Put into /var/www/cgi-bin/, make executable and enable CGI
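# A minimal lighttpd CGI block might look like this (a sketch, assuming
# python3 is at /usr/bin/python3 -- adapt to your setup):
#   server.modules += ( "mod_cgi" )
#   $HTTP["url"] =~ "^/cgi-bin/" {
#       cgi.assign = ( ".py" => "/usr/bin/python3" )
#   }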
import cgi
import os
CHUNK_SIZE = 100000
UPLOAD = "/var/www/upload/"
HEADER = """
<html><head><title>%s</title>
<style type="text/css">
body {
background-color: #002b36;
color: #839496;
font-family: "Open Sans", "Helvetica Neue",
"Helvetica", "Arial", "sans-serif";
}
div {
background-color: #073642;
border-radius: 0.5em;
margin: 1em auto;
padding: 2em;
width: 600px;
}
h1 {
font-size: 2em;
padding-bottom: 1em;
}
</style>
</head><body><div>"""
ERROR = """
<h1>Error: %s</h1>
</div></body></html>"""
SUCCESS = """
<h1>Saved <a href="../upload/%s">%s</a></h1>
<h2><a href="../index.html">Upload another file</a></h2>
<h2><a href="../upload/">Download files</a></h2>
</div></body></html>"""
def main():
"""File uploader static pages and form handler."""
print(HEADER % "File upload")
form = cgi.FieldStorage()
ff = form["file"]
fl = ff.file
fn = ff.filename
if not fn:
print(ERROR % "No file selected")
return
with open(
os.path.join(
UPLOAD, os.path.basename(fn)), "wb") as out:
while True:
content = fl.read(CHUNK_SIZE)
if not content:
break
out.write(content)
print(SUCCESS % (fn, fn))
if __name__ == "__main__":
main()
| mit | 8,360,567,450,956,556,000 | 20.753623 | 64 | 0.567622 | false |
switowski/invenio | invenio/legacy/ckeditor/connector.py | 13 | 5954 | # -*- coding: utf-8 -*-
# Comments and reviews for records.
# This file is part of Invenio.
# Copyright (C) 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio implementation of the connector to CKEditor for file upload.
This is heavily borrowed from FCKeditor 'upload.py' sample connector.
"""
import os
import re
from invenio.legacy.bibdocfile.api import decompose_file, propose_next_docname
allowed_extensions = {}
allowed_extensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
allowed_extensions['Image'] = ['bmp','gif','jpeg','jpg','png']
allowed_extensions['Flash'] = ['swf','flv']
allowed_extensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
default_allowed_types = ['File', 'Image', 'Flash', 'Media']
def process_CKEditor_upload(form, uid, user_files_path, user_files_absolute_path,
recid=None, allowed_types=default_allowed_types):
"""
Process a file upload request.
@param form: the form as in req object.
@type form: dict
@param uid: the user ID of the user uploading the file.
@type uid: int
@param user_files_path: the base URL where the file can be
accessed from the web after upload.
Note that you have to implement your own handler to stream the files from the directory
C{user_files_absolute_path} if you set this value.
@type user_files_path: string
@param user_files_absolute_path: the base path on the server where
the files should be saved.
Eg:C{%(CFG_PREFIX)s/var/data/comments/%(recid)s/%(uid)s}
@type user_files_absolute_path: string
@param recid: the record ID for which we upload a file. Leave None if not relevant.
@type recid: int
@param allowed_types: types allowed for uploading. These
are supported by CKEditor: ['File', 'Image', 'Flash', 'Media']
@type allowed_types: list of strings
@return: (msg, uploaded_file_path, uploaded_file_name, uploaded_file_url, callback_function)
"""
msg = ''
filename = ''
formfile = None
uploaded_file_path = ''
user_files_path = ''
for key, formfields in form.items():
if key != 'upload':
continue
if hasattr(formfields, "filename") and formfields.filename:
# We have found our file
filename = formfields.filename
formfile = formfields.file
break
can_upload_file_p = False
if not form['type'] in allowed_types:
# Is the type sent through the form ok?
msg = 'You are not allowed to upload a file of this type'
else:
# Is user allowed to upload such file extension?
basedir, name, extension = decompose_file(filename)
extension = extension[1:] # strip leading dot
if extension in allowed_extensions.get(form['type'], []):
can_upload_file_p = True
if not can_upload_file_p:
msg = 'You are not allowed to upload a file of this type'
elif filename and formfile:
## Before saving the file to disk, wash the filename (in particular
## washing away UNIX and Windows (e.g. DFS) paths):
filename = os.path.basename(filename.split('\\')[-1])
# Remove \ / | : ? *
filename = re.sub ( '\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\x00-\x1f\x7f-\x9f]/', '_', filename)
filename = filename.strip()
if filename != "":
# Check that file does not already exist
n = 1
while os.path.exists(os.path.join(user_files_absolute_path, filename)):
basedir, name, extension = decompose_file(filename)
new_name = propose_next_docname(name)
filename = new_name + extension
# This may be dangerous if the file size is bigger than the available memory
fp = open(os.path.join(user_files_absolute_path, filename), "w")
fp.write(formfile.read())
fp.close()
uploaded_file_path = os.path.join(user_files_absolute_path, filename)
uploaded_file_name = filename
return (msg, uploaded_file_path, filename, user_files_path, form['CKEditorFuncNum'])
def send_response(req, msg, fileurl, callback_function):
"""
Send a response to the CKEdtior after a file upload.
@param req: the request object
@param msg: the message to send to the user
@param fileurl: the URL where the newly uploaded file can be found, if any
@param callback_function: a value returned when calling C{process_CKEditor_upload()}
"""
req.content_type = 'text/html'
req.send_http_header()
req.write('''<html><body><script type="text/javascript">window.parent.CKEDITOR.tools.callFunction(%(function_number)s, '%(url)s', '%(msg)s')</script></body></html>''' % \
{'function_number': callback_function,
'url': fileurl,
'msg': msg.replace("'", "\\'")})
| gpl-2.0 | 1,420,174,824,186,686,000 | 44.106061 | 339 | 0.634699 | false |
Lcaracol/ideasbox.lan | ideasbox/library/models.py | 1 | 3341 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
from taggit.managers import TaggableManager
from ideasbox.models import TimeStampedModel
from ideasbox.search.models import SearchableQuerySet, SearchMixin
class BookQuerySet(SearchableQuerySet, models.QuerySet):
def available(self):
return self.filter(specimens__isnull=False).distinct()
class Book(SearchMixin, TimeStampedModel):
OTHER = 99
SECTION_CHOICES = (
(1, _('digital')),
(2, _('children - cartoons')),
(3, _('children - novels')),
(10, _('children - poetry')),
(11, _('children - theatre')),
(4, _('children - documentary')),
(5, _('children - comics')),
(6, _('adults - novels')),
(12, _('adults - poetry')),
(13, _('adults - theatre')),
(7, _('adults - documentary')),
(8, _('adults - comics')),
(9, _('game')),
(OTHER, _('other')),
)
# We allow ISBN to be null, but when it is set it needs to be unique.
isbn = models.CharField(max_length=40, unique=True, null=True, blank=True)
authors = models.CharField(_('authors'), max_length=300, blank=True)
serie = models.CharField(_('serie'), max_length=300, blank=True)
title = models.CharField(_('title'), max_length=300)
subtitle = models.CharField(_('subtitle'), max_length=300, blank=True)
summary = models.TextField(_('summary'), blank=True)
publisher = models.CharField(_('publisher'), max_length=100, blank=True)
section = models.PositiveSmallIntegerField(_('section'),
choices=SECTION_CHOICES)
lang = models.CharField(_('Language'), max_length=10,
choices=settings.LANGUAGES)
cover = models.ImageField(_('cover'), upload_to='library/cover',
blank=True)
objects = BookQuerySet.as_manager()
tags = TaggableManager(blank=True)
class Meta:
ordering = ['title']
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('library:book_detail', kwargs={'pk': self.pk})
@property
def index_strings(self):
return (self.title, self.isbn, self.authors, self.subtitle,
self.summary, self.serie, u' '.join(self.tags.names()))
class BookSpecimen(TimeStampedModel):
book = models.ForeignKey(Book, related_name='specimens')
serial = models.CharField(_('serial'), max_length=40, unique=True,
blank=True, null=True)
location = models.CharField(_('location'), max_length=300, blank=True)
remarks = models.TextField(_('remarks'), blank=True)
file = models.FileField(_('digital file'), upload_to='library/digital',
blank=True)
@property
def is_digital(self):
return bool(self.file)
def __unicode__(self):
if self.is_digital:
# serial is null for digital specimens.
return u'Digital specimen of "{0}"'.format(self.book)
return u'Specimen {0} of "{1}"'.format(self.serial, self.book)
def get_absolute_url(self):
return reverse('library:book_detail', kwargs={'pk': self.book.pk})
| mit | 5,770,914,993,431,909,000 | 35.315217 | 78 | 0.6085 | false |
nvoron23/socialite | test/test_attrs.py | 3 | 1423 | """
Testing attribute/function access in a query.
"""
import unittest
class TesetAttrs(unittest.TestCase):
def __init__(self, methodName='runTest'):
unittest.TestCase.__init__(self, methodName)
`Foo(int i, Avg avg) groupby(1).
Bar(int i).`
def setUp(self):
`clear Foo.
clear Bar.`
def test_field(self):
`Foo(i, $avg(n)) :- i=$range(0, 10), n=$range(1, 4).
Bar(i) :- Foo(0, avg), i=(int)avg.value.`
a = list(`Bar(a)`)[0]
self.assertTrue(a==2)
def test_func(self):
`Bar(i) :- i=(int)$Math.ceil(4.2).`
a = list(`Bar(a)`)[0]
self.assertTrue(a==5)
def test_str(self):
`Qux(String w) indexby w.
Qux(w) :- (w, unused)=$Str.split("qux unused trailing strs.. ", " ").`
w = list(`Qux(w)`)[0]
self.assertTrue(w=='qux')
def test_exception1(self):
try:
`Foo(i, $avg(n)) :- i=$range(0, 10), n=$range(1, 4).
Bar(i) :- Foo(0, avg), i=(int)avg.XXX .`
except SociaLiteException:
pass
else:
self.fail("Expected exception is not raised")
def test_exception2(self):
try:
`Bar(i) :- i=(int)$Math.XXX(4.2).`
except SociaLiteException:
pass
else:
self.fail("Expected exception is not raised")
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -5,227,690,993,091,358,000 | 25.849057 | 81 | 0.505271 | false |
joshwalawender/POCS | pocs/scheduler/field.py | 2 | 1797 | from astroplan import FixedTarget
from astropy.coordinates import SkyCoord
from pocs import PanBase
class Field(FixedTarget, PanBase):
def __init__(self, name, position, equinox='J2000', **kwargs):
""" An object representing an area to be observed
A `Field` corresponds to an `~astroplan.ObservingBlock` and contains information
about the center of the field (represented by an `astroplan.FixedTarget`).
Arguments:
name {str} -- Name of the field, typically the name of object at center `position`
position {str} -- Center of field, can be anything accepted by `~astropy.coordinates.SkyCoord`
**kwargs {dict} -- Additional keywords to be passed to `astroplan.ObservingBlock`
"""
PanBase.__init__(self)
super().__init__(SkyCoord(position, equinox=equinox, frame='icrs'), name=name, **kwargs)
self._field_name = self.name.title().replace(' ', '').replace('-', '')
##################################################################################################
# Properties
##################################################################################################
@property
def field_name(self):
""" Flattened field name appropriate for paths """
return self._field_name
##################################################################################################
# Methods
##################################################################################################
##################################################################################################
# Private Methods
##################################################################################################
def __str__(self):
return self.name
| mit | -8,694,060,924,666,827,000 | 36.4375 | 106 | 0.432387 | false |
rdblue/Impala | testdata/bin/wait-for-hiveserver2.py | 14 | 3445 | #!/usr/bin/env impala-python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script waits for the Hive HiveServer2 service to become available by attempting
# to create a new session until the session creation succeeds, or a timeout is reached.
# TODO: Consider combining this with wait-for-metastore.py. A TCLIService client
# can perhaps also talk to the metastore.
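#
# Example invocation (flags as defined below; values are illustrative):
#   wait-for-hiveserver2.py --hs2_hostport=localhost:11050 --transport=plain_sasl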
import time
import getpass
from optparse import OptionParser
from tests.util.thrift_util import create_transport
# Imports required for HiveServer2 Client
from cli_service import LegacyTCLIService
from thrift.transport import TTransport, TSocket
from thrift.protocol import TBinaryProtocol
parser = OptionParser()
parser.add_option("--hs2_hostport", dest="hs2_hostport",
default="localhost:11050", help="HiveServer2 hostport to wait for.")
parser.add_option("--transport", dest="transport", default="buffered",
help="Transport to use for connecting to HiveServer2. Valid values: "
"'buffered', 'kerberos', 'plain_sasl'.")
options, args = parser.parse_args()
hs2_host, hs2_port = options.hs2_hostport.split(':')
if options.transport == "plain_sasl":
# Here we supply a bogus username of "foo" and a bogus password of "bar".
# We just have to supply *something*, else HS2 will block waiting for user
# input. Any bogus username and password are accepted.
hs2_transport = create_transport(hs2_host, hs2_port, "hive", options.transport,
"foo", "bar")
else:
hs2_transport = create_transport(hs2_host, hs2_port, "hive", options.transport)
protocol = TBinaryProtocol.TBinaryProtocol(hs2_transport)
hs2_client = LegacyTCLIService.Client(protocol)
# Try to connect to the HiveServer2 service and create a session
now = time.time()
TIMEOUT_SECONDS = 30.0
while time.time() - now < TIMEOUT_SECONDS:
try:
hs2_transport.open()
open_session_req = LegacyTCLIService.TOpenSessionReq()
open_session_req.username = getpass.getuser()
resp = hs2_client.OpenSession(open_session_req)
if resp.status.statusCode == LegacyTCLIService.TStatusCode.SUCCESS_STATUS:
close_session_req = LegacyTCLIService.TCloseSessionReq()
close_session_req.sessionHandle = resp.sessionHandle
hs2_client.CloseSession(close_session_req)
print "HiveServer2 service is up at %s." % options.hs2_hostport
exit(0)
except Exception as e:
if "SASL" in e.message: # Bail out on SASL failures
print "SASL failure when attempting connection:"
raise
if "GSS" in e.message: # Other GSSAPI failures
print "GSS failure when attempting connection:"
raise
print "Waiting for HiveServer2 at %s..." % options.hs2_hostport
print e
finally:
hs2_transport.close()
time.sleep(0.5)
print "HiveServer2 service failed to start within %s seconds." % TIMEOUT_SECONDS
exit(1)
| apache-2.0 | 5,044,775,257,845,434,000 | 41.012195 | 87 | 0.727141 | false |
emfcamp/micropython | tests/basics/int1.py | 44 | 1559 | print(int(False))
print(int(True))
print(int(0))
print(int(1))
print(int(+1))
print(int(-1))
print(int('0'))
print(int('+0'))
print(int('-0'))
print(int('1'))
print(int('+1'))
print(int('-1'))
print(int('01'))
print(int('9'))
print(int('10'))
print(int('+10'))
print(int('-10'))
print(int('12'))
print(int('-12'))
print(int('99'))
print(int('100'))
print(int('314'))
print(int(' 314'))
print(int('314 '))
print(int(' \t\t 314 \t\t '))
print(int(' 1 '))
print(int(' -3 '))
print(int('0', 10))
print(int('1', 10))
print(int(' \t 1 \t ', 10))
print(int('11', 10))
print(int('11', 16))
print(int('11', 8))
print(int('11', 2))
print(int('11', 36))
print(int('0o123', 0))
print(int('8388607'))
print(int('0x123', 16))
print(int('0X123', 16))
print(int('0A', 16))
print(int('0o123', 8))
print(int('0O123', 8))
print(int('0123', 8))
print(int('0b100', 2))
print(int('0B100', 2))
print(int('0100', 2))
print(int(' \t 0o12', 8))
print(int('0o12 \t ', 8))
print(int(b"12", 10))
print(int(b"12"))
def test(value, base):
try:
print(int(value, base))
except ValueError:
print('ValueError')
test('x', 0)
test('1x', 0)
test(' 1x', 0)
test(' 1' + chr(2) + ' ', 0)
test('', 0)
test(' ', 0)
test(' \t\t ', 0)
test('0x', 16)
test('0x', 0)
test('0o', 8)
test('0o', 0)
test('0b', 2)
test('0b', 0)
test('0b2', 2)
test('0o8', 8)
test('0xg', 16)
test('1 1', 16)
test('123', 37)
# check that we don't parse this as a floating point number
print(0x1e+1)
# can't convert list to int
try:
int([])
except TypeError:
print("TypeError")
| mit | -5,957,200,644,984,093,000 | 16.516854 | 59 | 0.563823 | false |
skuda/client-python | kubernetes/client/models/apps_v1beta1_deployment_strategy.py | 1 | 4125 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class AppsV1beta1DeploymentStrategy(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, rolling_update=None, type=None):
"""
AppsV1beta1DeploymentStrategy - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'rolling_update': 'AppsV1beta1RollingUpdateDeployment',
'type': 'str'
}
self.attribute_map = {
'rolling_update': 'rollingUpdate',
'type': 'type'
}
self._rolling_update = rolling_update
self._type = type
@property
def rolling_update(self):
"""
Gets the rolling_update of this AppsV1beta1DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:return: The rolling_update of this AppsV1beta1DeploymentStrategy.
:rtype: AppsV1beta1RollingUpdateDeployment
"""
return self._rolling_update
@rolling_update.setter
def rolling_update(self, rolling_update):
"""
Sets the rolling_update of this AppsV1beta1DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:param rolling_update: The rolling_update of this AppsV1beta1DeploymentStrategy.
:type: AppsV1beta1RollingUpdateDeployment
"""
self._rolling_update = rolling_update
@property
def type(self):
"""
Gets the type of this AppsV1beta1DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:return: The type of this AppsV1beta1DeploymentStrategy.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this AppsV1beta1DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:param type: The type of this AppsV1beta1DeploymentStrategy.
:type: str
"""
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | 8,397,912,927,662,578,000 | 28.464286 | 105 | 0.572606 | false |
kennedyshead/home-assistant | homeassistant/components/vultr/sensor.py | 5 | 3249 | """Support for monitoring the state of Vultr Subscriptions."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME, DATA_GIGABYTES
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_CURRENT_BANDWIDTH_USED,
ATTR_PENDING_CHARGES,
CONF_SUBSCRIPTION,
DATA_VULTR,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Vultr {} {}"
MONITORED_CONDITIONS = {
ATTR_CURRENT_BANDWIDTH_USED: [
"Current Bandwidth Used",
DATA_GIGABYTES,
"mdi:chart-histogram",
],
ATTR_PENDING_CHARGES: ["Pending Charges", "US$", "mdi:currency-usd"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SUBSCRIPTION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
}
)
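# An illustrative configuration.yaml entry (a sketch; monitored_conditions
# defaults to all keys of MONITORED_CONDITIONS, i.e. the string values of
# ATTR_CURRENT_BANDWIDTH_USED and ATTR_PENDING_CHARGES):
#   sensor:
#     - platform: vultr
#       subscription: "123456"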
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Vultr subscription (server) sensor."""
vultr = hass.data[DATA_VULTR]
subscription = config.get(CONF_SUBSCRIPTION)
name = config.get(CONF_NAME)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
if subscription not in vultr.data:
_LOGGER.error("Subscription %s not found", subscription)
return
sensors = []
for condition in monitored_conditions:
sensors.append(VultrSensor(vultr, subscription, condition, name))
add_entities(sensors, True)
class VultrSensor(SensorEntity):
"""Representation of a Vultr subscription sensor."""
def __init__(self, vultr, subscription, condition, name):
"""Initialize a new Vultr sensor."""
self._vultr = vultr
self._condition = condition
self._name = name
self.subscription = subscription
self.data = None
condition_info = MONITORED_CONDITIONS[condition]
self._condition_name = condition_info[0]
self._units = condition_info[1]
self._icon = condition_info[2]
@property
def name(self):
"""Return the name of the sensor."""
try:
return self._name.format(self._condition_name)
except IndexError:
try:
return self._name.format(self.data["label"], self._condition_name)
except (KeyError, TypeError):
return self._name
@property
def icon(self):
"""Return the icon used in the frontend if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return self._units
@property
def state(self):
"""Return the value of this given sensor type."""
try:
return round(float(self.data.get(self._condition)), 2)
except (TypeError, ValueError):
return self.data.get(self._condition)
def update(self):
"""Update state of sensor."""
self._vultr.update()
self.data = self._vultr.data[self.subscription]
| apache-2.0 | -2,779,895,684,992,299,000 | 28.536364 | 84 | 0.642351 | false |
sf-wind/caffe2 | caffe2/python/caffe_translator.py | 3 | 35395 | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package caffe_translator
# Module caffe2.python.caffe_translator
#!/usr/bin/env python2
import argparse
import copy
import logging
import re
import numpy as np # noqa
from caffe2.proto import caffe2_pb2, caffe2_legacy_pb2
from caffe.proto import caffe_pb2
from caffe2.python import core, utils, workspace
from google.protobuf import text_format
logging.basicConfig()
log = logging.getLogger("caffe_translator")
log.setLevel(logging.INFO)
def _StateMeetsRule(state, rule):
"""A function that reproduces Caffe's StateMeetsRule functionality."""
if rule.HasField('phase') and rule.phase != state.phase:
return False
if rule.HasField('min_level') and state.level < rule.min_level:
return False
if rule.HasField('max_level') and state.level > rule.max_level:
return False
curr_stages = set(list(state.stage))
# all stages in rule.stages should be in, otherwise it's not a match.
if len(rule.stage) and any([s not in curr_stages for s in rule.stage]):
return False
# none of the stage in rule.stages should be in, otherwise it's not a match.
if len(rule.not_stage) and any([s in curr_stages for s in rule.not_stage]):
return False
# If none of the nonmatch happens, return True.
return True
def _ShouldInclude(net_state, layer):
"""A function that reproduces Caffe's inclusion and exclusion rule."""
ret = (len(layer.include) == 0)
# check exclude rules: if any exclusion is met, we shouldn't include.
ret &= not any([_StateMeetsRule(net_state, rule) for rule in layer.exclude])
if len(layer.include):
# check include rules: if any inclusion is met, we should include.
ret |= any([_StateMeetsRule(net_state, rule) for rule in layer.include])
return ret
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
dim_map = {}
ws = workspace.C.Workspace()
for param in net_params.protos:
ws.create_blob(param.name) \
.feed(utils.Caffe2TensorToNumpyArray(param))
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed(dummy_input)
# Get dimensions with legacy pad
for i in range(len(net.op)):
op_def = net.op[i]
ws._run_operator(op_def.SerializeToString())
if i in legacy_pad_ops:
output = op_def.output[0]
blob_legacy = ws.fetch_blob(output)
dim_map[i] = blob_legacy.shape
return dim_map
def _GetLegacyPadArgs(op_def, arg_map):
pads = {}
keys = ['pad_l', 'pad_t', 'pad_r', 'pad_b']
is_pad = 'pad' in arg_map
if is_pad:
for k in keys:
pads[k] = arg_map['pad'].i
else:
pads = {x: arg_map[x].i for x in keys}
return pads
def _AdjustDims(op_def, arg_map, pads, dim1, dim2):
n1, c1, h1, w1 = dim1
n2, c2, h2, w2 = dim2
assert(n1 == n2)
assert(c1 == c2)
is_pad = 'pad' in arg_map
if h1 != h2 or w1 != w2:
if h1 == h2 + 1:
pads['pad_b'] += 1
elif h1 != h2:
raise Exception("Unexpected dimensions for height:", h1, h2)
if w1 == w2 + 1:
pads['pad_r'] += 1
elif w1 != w2:
raise Exception("Unexpected dimensions for width:", w1, w2)
if is_pad:
op_def.arg.remove(arg_map['pad'])
args = []
for name in pads.keys():
arg = caffe2_pb2.Argument()
arg.name = name
arg.i = pads[name]
args.append(arg)
op_def.arg.extend(args)
else:
for name in pads.keys():
arg_map[name].i = pads[name]
def _RemoveLegacyPad(net, net_params, input_dims):
legacy_pad_ops = []
for i in range(len(net.op)):
op_def = net.op[i]
if re.match(r'^(Conv|ConvTranspose|MaxPool|AveragePool)(\dD)?$',
op_def.type):
for arg in op_def.arg:
if arg.name == 'legacy_pad':
legacy_pad_ops.append(i)
break
if legacy_pad_ops:
n, c, h, w = input_dims
dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
dim_map = _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops)
# Running with the legacy pad argument removed
# compare the dimensions and adjust pad argument when necessary
ws = workspace.C.Workspace()
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed_blob(dummy_input)
for param in net_params.protos:
ws.create_blob(param.name) \
.feed_blob(utils.Caffe2TensorToNumpyArray(param))
for i in range(len(net.op)):
op_def = net.op[i]
if i in legacy_pad_ops:
arg_map = {}
for arg in op_def.arg:
arg_map[arg.name] = arg
pads = _GetLegacyPadArgs(op_def, arg_map)
# remove legacy pad arg
for j in range(len(op_def.arg)):
arg = op_def.arg[j]
if arg.name == 'legacy_pad':
del op_def.arg[j]
break
output = op_def.output[0]
# use a new name to avoid the interference with inplace
nonlegacy_output = output + '_nonlegacy'
op_def.output[0] = nonlegacy_output
ws._run_operator(op_def.SerializeToString())
blob_nonlegacy = ws.fetch_blob(nonlegacy_output)
# reset output name
op_def.output[0] = output
dim1 = dim_map[i]
dim2 = blob_nonlegacy.shape
_AdjustDims(op_def, arg_map, pads, dim1, dim2)
ws._run_operator(op_def.SerializeToString())
return net
def _GetBlobDimMap(net, net_params, dummy_input):
dim_map = {}
ws = workspace.C.Workspace()
for param in net_params.protos:
ws.create_blob(param.name) \
.feed(utils.Caffe2TensorToNumpyArray(param))
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed(dummy_input)
# Get dimensions with legacy pad
for i in range(len(net.op)):
op_def = net.op[i]
ws._run_operator(op_def.SerializeToString())
for output in op_def.output:
blob = ws.fetch_blob(output)
dim_map[output] = blob.shape
return dim_map
def _GetInputDims(caffe_net):
input_dims = []
if caffe_net.input_dim:
input_dims = caffe_net.input_dim
elif caffe_net.input_shape:
input_dims = caffe_net.input_shape[0].dim
elif caffe_net.layer[0].input_param.shape:
# getting input dimension from first layer
input_dims = caffe_net.layer[0].input_param.shape[0].dim
return input_dims
class TranslatorRegistry(object):
registry_ = {}
@classmethod
def Register(cls, op_name):
"""A decorator for registering gradient mappings."""
def Wrapper(func):
cls.registry_[op_name] = func
return func
return Wrapper
@classmethod
def TranslateLayer(cls, layer, pretrained_blobs, is_test, **kwargs):
try:
caffe_ops, params = cls.registry_[layer.type](
layer, pretrained_blobs, is_test, **kwargs)
except KeyError:
raise KeyError('No translator registered for layer: %s yet.' %
str(layer))
if caffe_ops is None:
caffe_ops = []
if type(caffe_ops) is not list:
caffe_ops = [caffe_ops]
return caffe_ops, params
@classmethod
def TranslateModel(
cls,
caffe_net,
pretrained_net,
is_test=False,
net_state=None,
remove_legacy_pad=False,
input_dims=None
):
net_state = caffe_pb2.NetState() if net_state is None else net_state
net = caffe2_pb2.NetDef()
net.name = caffe_net.name
net_params = caffe2_pb2.TensorProtos()
if len(caffe_net.layers) > 0:
raise ValueError(
'I think something is wrong. This translation script '
'only accepts new style layers that are stored in the '
'layer field.'
)
if not input_dims:
input_dims = _GetInputDims(caffe_net)
for layer in caffe_net.layer:
if not _ShouldInclude(net_state, layer):
log.info('Current net state does not need layer {}'
.format(layer.name))
continue
log.info('Translate layer {}'.format(layer.name))
# Get pretrained one
pretrained_layers = (
[l for l in pretrained_net.layer
if l.name == layer.name] + [l
for l in pretrained_net.layers
if l.name == layer.name]
)
if len(pretrained_layers) > 1:
raise ValueError(
'huh? more than one pretrained layer of one name?')
elif len(pretrained_layers) == 1:
pretrained_blobs = [
utils.CaffeBlobToNumpyArray(blob)
for blob in pretrained_layers[0].blobs
]
else:
# No pretrained layer for the given layer name. We'll just pass
# no parameter blobs.
# print 'No pretrained layer for layer', layer.name
pretrained_blobs = []
operators, params = cls.TranslateLayer(
layer, pretrained_blobs, is_test, net=net,
net_params=net_params, input_dims=input_dims)
net.op.extend(operators)
net_params.protos.extend(params)
if remove_legacy_pad:
assert input_dims, \
'Please specify input_dims to remove legacy_pad'
net = _RemoveLegacyPad(net, net_params, input_dims)
return net, net_params
def TranslateModel(*args, **kwargs):
return TranslatorRegistry.TranslateModel(*args, **kwargs)
def ConvertTensorProtosToInitNet(net_params, input_name):
"""Takes the net_params returned from TranslateModel, and wrap it as an
init net that contain GivenTensorFill.
This is a very simple feature that only works with float tensors, and is
only intended to be used in an environment where you want a single
initialization file - for more complex cases, use a db to store the
parameters.
"""
init_net = caffe2_pb2.NetDef()
for tensor in net_params.protos:
if len(tensor.float_data) == 0:
raise RuntimeError(
"Only float tensors are supported in this util.")
op = core.CreateOperator(
"GivenTensorFill", [], [tensor.name],
arg=[
utils.MakeArgument("shape", list(tensor.dims)),
utils.MakeArgument("values", tensor.float_data)])
init_net.op.extend([op])
init_net.op.extend([core.CreateOperator("ConstantFill", [], [input_name], shape=[1])])
return init_net
def BaseTranslate(layer, caffe2_type):
"""A simple translate interface that maps the layer input and output."""
caffe2_op = caffe2_pb2.OperatorDef()
caffe2_op.type = caffe2_type
caffe2_op.input.extend(layer.bottom)
caffe2_op.output.extend(layer.top)
return caffe2_op
def AddArgument(op, key, value):
"""Makes an argument based on the value type."""
op.arg.extend([utils.MakeArgument(key, value)])
################################################################################
# Common translators for layers.
################################################################################
@TranslatorRegistry.Register("Input")
def TranslateInput(layer, pretrained_blobs, is_test, **kwargs):
return [], []
@TranslatorRegistry.Register("VideoData")
def TranslateVideoData(layer, pretrained_blobs, is_test, **kwargs):
return [], []
@TranslatorRegistry.Register("Data")
def TranslateData(layer, pretrained_blobs, is_test, **kwargs):
return [], []
# A function used in convolution, pooling and deconvolution to deal with
# conv pool specific parameters.
def _TranslateStridePadKernelHelper(param, caffe_op):
try:
if (len(param.stride) > 1 or len(param.kernel_size) > 1 or
len(param.pad) > 1):
raise NotImplementedError(
"Translator currently does not support non-conventional "
"pad/kernel/stride settings."
)
stride = param.stride[0] if len(param.stride) else 1
pad = param.pad[0] if len(param.pad) else 0
kernel = param.kernel_size[0] if len(param.kernel_size) else 0
except TypeError:
# This catches the case of a PoolingParameter, in which case we are
# having non-repeating pad, stride and kernel.
stride = param.stride
pad = param.pad
kernel = param.kernel_size
# Get stride
if param.HasField("stride_h") or param.HasField("stride_w"):
AddArgument(caffe_op, "stride_h", param.stride_h)
AddArgument(caffe_op, "stride_w", param.stride_w)
else:
AddArgument(caffe_op, "stride", stride)
# Get pad
if param.HasField("pad_h") or param.HasField("pad_w"):
if param.pad_h == param.pad_w:
AddArgument(caffe_op, "pad", param.pad_h)
else:
AddArgument(caffe_op, "pad_t", param.pad_h)
AddArgument(caffe_op, "pad_b", param.pad_h)
AddArgument(caffe_op, "pad_l", param.pad_w)
AddArgument(caffe_op, "pad_r", param.pad_w)
else:
AddArgument(caffe_op, "pad", pad)
# Get kernel
if param.HasField("kernel_h") or param.HasField("kernel_w"):
AddArgument(caffe_op, "kernel_h", param.kernel_h)
AddArgument(caffe_op, "kernel_w", param.kernel_w)
else:
AddArgument(caffe_op, "kernel", kernel)
@TranslatorRegistry.Register("Convolution3D")
def TranslateConvNd(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution3d_param
caffe_op = BaseTranslate(layer, "Conv")
output = caffe_op.output[0]
caffe_op.input.append(output + '_w')
AddArgument(
caffe_op,
"kernels",
[param.kernel_depth, param.kernel_size, param.kernel_size])
AddArgument(
caffe_op,
"strides",
[param.temporal_stride, param.stride, param.stride])
temporal_pad = 0
spatial_pad = 0
if hasattr(param, 'temporal_pad'):
temporal_pad = param.temporal_pad
if hasattr(param, 'pad'):
spatial_pad = param.pad
AddArgument(caffe_op, "pads", [temporal_pad, spatial_pad, spatial_pad] * 2)
# weight
params = [
utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')]
# bias
if len(pretrained_blobs) == 2:
caffe_op.input.append(output + '_b')
params.append(
utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'))
return caffe_op, params
@TranslatorRegistry.Register("Convolution")
def TranslateConv(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution_param
caffe_op = BaseTranslate(layer, "Conv")
output = caffe_op.output[0]
caffe_op.input.append(output + '_w')
_TranslateStridePadKernelHelper(param, caffe_op)
# weight
params = [
utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')]
# bias
if len(pretrained_blobs) == 2:
caffe_op.input.append(output + '_b')
params.append(
utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'))
# Group convolution option
if param.group != 1:
AddArgument(caffe_op, "group", param.group)
# Get dilation - not tested. If you have a model and this checks out,
# please provide a test and uncomment this.
if len(param.dilation) > 0:
if len(param.dilation) == 1:
AddArgument(caffe_op, "dilation", param.dilation[0])
elif len(param.dilation) == 2:
AddArgument(caffe_op, "dilation_h", param.dilation[0])
AddArgument(caffe_op, "dilation_w", param.dilation[1])
return caffe_op, params
@TranslatorRegistry.Register("Deconvolution")
def TranslateDeconv(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution_param
if param.group > 1:
raise NotImplementedError(
"Translator currently does not support group deconvolution."
)
caffe_op = BaseTranslate(layer, "ConvTranspose")
output = caffe_op.output[0]
_TranslateStridePadKernelHelper(param, caffe_op)
caffe_op.input.extend([output + '_w'])
AddArgument(caffe_op, "order", "NCHW")
weight = utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')
if param.bias_term:
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'
)
caffe_op.input.extend([output + '_b'])
return caffe_op, [weight, bias]
else:
return caffe_op, [weight]
@TranslatorRegistry.Register("Crop")
def TranslateCrop(layer, pretrained_blobs, is_test, **kwargs):
    net, net_params, input_dims = (kwargs['net'], kwargs['net_params'],
                                   kwargs['input_dims'])
n, c, h, w = input_dims
dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
dim_map = _GetBlobDimMap(net, net_params, dummy_input)
param = layer.crop_param
axis, offsets = param.axis, param.offset
caffe_op = BaseTranslate(layer, "Slice")
input_1 = caffe_op.input[1]
input_1_dim = dim_map[input_1]
starts, ends = [], []
dims = len(dim_map[input_1])
assert len(offsets) == 1, 'Caffe Translator for Crop only works for offset \
of 1 for now'
for _ in range(axis):
starts.append(0)
ends.append(-1)
end_offset = [int(offsets[0] + input_1_dim[i]) for i in range(axis, dims)]
ends.extend(end_offset)
starts.extend([offsets[0]] * len(end_offset))
op = caffe2_pb2.OperatorDef()
op.input.extend([caffe_op.input[0]])
op.output.extend(caffe_op.output)
op.arg.extend(caffe_op.arg)
op.type = caffe_op.type
AddArgument(op, "starts", starts)
AddArgument(op, "ends", ends)
return op, []
@TranslatorRegistry.Register("ReLU")
def TranslateRelu(layer, pretrained_blobs, is_test, **kwargs):
return BaseTranslate(layer, "Relu"), []
@TranslatorRegistry.Register("Pooling")
def TranslatePool(layer, pretrained_blobs, is_test, **kwargs):
param = layer.pooling_param
if param.pool == caffe_pb2.PoolingParameter.MAX:
caffe_op = BaseTranslate(layer, "MaxPool")
elif param.pool == caffe_pb2.PoolingParameter.AVE:
caffe_op = BaseTranslate(layer, "AveragePool")
_TranslateStridePadKernelHelper(param, caffe_op)
AddArgument(caffe_op, "order", "NCHW")
try:
# In the Facebook port of Caffe, a torch_pooling field was added to
# map the pooling computation of Torch. Essentially, it uses
# floor((height + 2 * padding - kernel) / stride) + 1
# instead of
# ceil((height + 2 * padding - kernel) / stride) + 1
# which is Caffe's version.
# Torch pooling is actually the same as Caffe2 pooling, so we don't
# need to do anything.
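        # Worked example (illustrative): height=14, pad=0, kernel=3, stride=2
        # gives floor(11 / 2) + 1 = 6 output rows under the Torch/Caffe2 rule,
        # while Caffe's ceil rule would give ceil(11 / 2) + 1 = 7.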
is_torch_pooling = param.torch_pooling
except AttributeError:
is_torch_pooling = False
if not is_torch_pooling:
AddArgument(caffe_op, "legacy_pad",
caffe2_legacy_pb2.CAFFE_LEGACY_POOLING)
if param.global_pooling:
AddArgument(caffe_op, "global_pooling", 1)
return caffe_op, []
@TranslatorRegistry.Register("Pooling3D")
def TranslatePool3D(layer, pretrained_blobs, is_test, **kwargs):
param = layer.pooling3d_param
if param.pool == caffe_pb2.Pooling3DParameter.MAX:
caffe_op = BaseTranslate(layer, "MaxPool")
elif param.pool == caffe_pb2.Pooling3DParameter.AVE:
caffe_op = BaseTranslate(layer, "AveragePool")
AddArgument(caffe_op, "order", "NCHW")
AddArgument(
caffe_op,
"kernels",
[param.kernel_depth, param.kernel_size, param.kernel_size])
AddArgument(
caffe_op,
"strides",
[param.temporal_stride, param.stride, param.stride])
temporal_pad = 0
spatial_pad = 0
if hasattr(param, 'temporal_pad'):
temporal_pad = param.temporal_pad
if hasattr(param, 'pad'):
spatial_pad = param.pad
AddArgument(caffe_op, "pads", [temporal_pad, spatial_pad, spatial_pad] * 2)
return caffe_op, []
@TranslatorRegistry.Register("LRN")
def TranslateLRN(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "LRN")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_scale'])
param = layer.lrn_param
if param.norm_region != caffe_pb2.LRNParameter.ACROSS_CHANNELS:
raise ValueError(
"Does not support norm region other than across channels.")
AddArgument(caffe_op, "size", int(param.local_size))
AddArgument(caffe_op, "alpha", float(param.alpha))
AddArgument(caffe_op, "beta", float(param.beta))
AddArgument(caffe_op, "bias", float(param.k))
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, []
@TranslatorRegistry.Register("InnerProduct")
def TranslateInnerProduct(layer, pretrained_blobs, is_test, **kwargs):
param = layer.inner_product_param
try:
if param.axis != 1 or param.transpose:
raise ValueError(
"We don't have testing case for non-default axis and transpose "
"cases yet so we are disabling it for now. If you have a model "
"with this, please do send us your model for us to update this "
"support, and you are more than welcome to send a PR for this.")
except AttributeError:
# We might be using an historic Caffe protobuf that does not have axis
# and transpose arguments, so we will silently pass.
pass
caffe_op = BaseTranslate(layer, "FC")
output = caffe_op.output[0]
caffe_op.input.extend([output + '_w', output + '_b'])
# To provide the old-style 4-dimensional blob (1, 1, dim_output, dim_input)
# case, we always explicitly reshape the pretrained blob.
if pretrained_blobs[0].ndim not in [2, 4]:
raise ValueError("Unexpected weight ndim.")
if (pretrained_blobs[0].ndim == 4 and
list(pretrained_blobs[0].shape[:2]) != [1, 1]):
raise ValueError(
"If pretrained blob has 4 dims (old-style Caffe), the first two "
"should be of value 1, but I got " + str(pretrained_blobs[0].shape))
weight = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].reshape(-1, pretrained_blobs[0].shape[-1]),
output + '_w'
)
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'
)
return caffe_op, [weight, bias]
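# Example of the reshape above (illustrative sizes): an old-style 4-D weight
# blob of shape (1, 1, 1000, 2048) becomes the 2-D FC weight of shape
# (1000, 2048) stored as <output>_w.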
@TranslatorRegistry.Register("Dropout")
def TranslateDropout(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Dropout")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_mask'])
param = layer.dropout_param
AddArgument(caffe_op, "ratio", param.dropout_ratio)
if (is_test):
AddArgument(caffe_op, "is_test", 1)
return caffe_op, []
@TranslatorRegistry.Register("Softmax")
def TranslateSoftmax(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Softmax")
return caffe_op, []
@TranslatorRegistry.Register("SoftmaxWithLoss")
def TranslateSoftmaxWithLoss(layer, pretrained_blobs, is_test, **kwargs):
softmax_op = core.CreateOperator(
"Softmax", [layer.bottom[0]],
layer.bottom[0] + "_translator_autogen_softmax")
xent_op = core.CreateOperator(
"LabelCrossEntropy",
[softmax_op.output[0], layer.bottom[1]],
layer.bottom[0] + "_translator_autogen_xent")
loss_op = core.CreateOperator(
"AveragedLoss",
xent_op.output[0],
layer.top[0])
return [softmax_op, xent_op, loss_op], []
@TranslatorRegistry.Register("Accuracy")
def TranslateAccuracy(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Accuracy")
if layer.accuracy_param.top_k != 1:
AddArgument(caffe_op, "top_k", layer.accuracy_param.top_k)
return caffe_op, []
@TranslatorRegistry.Register("Concat")
def TranslateConcat(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Concat")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_dims'])
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, []
@TranslatorRegistry.Register("TanH")
def TranslateTanH(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Tanh")
return caffe_op, []
@TranslatorRegistry.Register("InstanceNorm")
def TranslateInstanceNorm(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "InstanceNorm")
output = caffe_op.output[0]
weight = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].flatten(), output + '_w')
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b')
caffe_op.input.extend([output + '_w', output + '_b'])
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, [weight, bias]
@TranslatorRegistry.Register("BatchNorm")
def TranslateBatchNorm(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "SpatialBN")
output = caffe_op.output[0]
param = layer.batch_norm_param
AddArgument(caffe_op, "is_test", is_test)
AddArgument(caffe_op, "epsilon", param.eps)
AddArgument(caffe_op, "order", "NCHW")
caffe_op.input.extend(
[output + "_scale",
output + "_bias",
output + "_mean",
output + "_var"])
if not is_test:
caffe_op.output.extend(
[output + "_mean",
output + "_var",
output + "_saved_mean",
output + "_saved_var"])
n_channels = pretrained_blobs[0].shape[0]
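    # Caffe's BatchNorm stores accumulated mean/variance in blobs 0 and 1 plus
    # a scale factor in blob 2; dividing by that factor below recovers the
    # running mean and variance that SpatialBN expects.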
if pretrained_blobs[2][0] != 0:
mean = utils.NumpyArrayToCaffe2Tensor(
(1. / pretrained_blobs[2][0]) * pretrained_blobs[0],
output + '_mean')
var = utils.NumpyArrayToCaffe2Tensor(
(1. / pretrained_blobs[2][0]) * pretrained_blobs[1],
output + '_var')
else:
raise RuntimeError("scalar is zero.")
pretrained_blobs[2][0] = 1
pretrained_blobs[2] = np.tile(pretrained_blobs[2], (n_channels, ))
scale = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[2],
output + '_scale')
bias = utils.NumpyArrayToCaffe2Tensor(
np.zeros_like(pretrained_blobs[2]),
output + '_bias')
return caffe_op, [scale, bias, mean, var]
@TranslatorRegistry.Register("Eltwise")
def TranslateElementWise(layer, pretrained_blobs, is_test, **kwargs):
param = layer.eltwise_param
# TODO(jiayq): if we have a protobuf that uses this, lift this constraint
# and verify that we can correctly translate.
if len(param.coeff) or param.operation != 1:
raise RuntimeError("This eltwise layer is not yet supported.")
caffe_op = BaseTranslate(layer, "Sum")
return caffe_op, []
@TranslatorRegistry.Register("Scale")
def TranslateScale(layer, pretrained_blobs, is_test, **kwargs):
mul_op = BaseTranslate(layer, "Mul")
scale_param = layer.scale_param
AddArgument(mul_op, "axis", scale_param.axis)
AddArgument(mul_op, "broadcast", True)
if len(mul_op.input) == 1:
# the scale parameter is in pretrained blobs
if scale_param.num_axes != 1:
raise RuntimeError("This path has not been verified yet.")
output = mul_op.output[0]
mul_op_param = output + '_w'
mul_op.input.append(mul_op_param)
weights = []
weights.append(utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].flatten(), mul_op_param))
add_op = None
if len(pretrained_blobs) == 1:
# No bias-term in Scale layer
pass
elif len(pretrained_blobs) == 2:
# Caffe Scale layer supports a bias term such that it computes
# (scale_param * X + bias), whereas Caffe2 Mul op doesn't.
# Include a separate Add op for the bias followed by Mul.
add_op = copy.deepcopy(mul_op)
add_op.type = "Add"
add_op_param = output + '_b'
internal_blob = output + "_internal"
del mul_op.output[:]
mul_op.output.append(internal_blob)
del add_op.input[:]
add_op.input.append(internal_blob)
add_op.input.append(add_op_param)
weights.append(utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), add_op_param))
else:
raise RuntimeError("Unexpected number of pretrained blobs in Scale")
caffe_ops = [mul_op]
if add_op:
caffe_ops.append(add_op)
assert len(caffe_ops) == len(weights)
return caffe_ops, weights
elif len(mul_op.input) == 2:
# TODO(jiayq): find a protobuf that uses this and verify.
raise RuntimeError("This path has not been verified yet.")
else:
raise RuntimeError("Unexpected number of inputs.")
@TranslatorRegistry.Register("Reshape")
def TranslateReshape(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Reshape")
caffe_op.output.append("_" + caffe_op.input[0] + "_dims")
reshape_param = layer.reshape_param
AddArgument(caffe_op, 'shape', reshape_param.shape.dim)
return caffe_op, []
@TranslatorRegistry.Register("Flatten")
def TranslateFlatten(layer, pretrained_blobs, is_test, **kwargs):
param = layer.flatten_param
if param.end_axis != -1:
raise NotImplementedError("flatten_param.end_axis not supported yet.")
if param.axis == 0:
caffe_op = BaseTranslate(layer, "FlattenToVec")
elif param.axis == 1:
caffe_op = BaseTranslate(layer, "Flatten")
else:
# This could be a Reshape op, but dim size is not known here.
raise NotImplementedError(
"Not supported yet for flatten_param.axis {}.".format(param.axis))
return caffe_op, []
@TranslatorRegistry.Register("Sigmoid")
def TranslateSigmoid(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Sigmoid")
return caffe_op, []
@TranslatorRegistry.Register("ROIPooling")
def TranslateROIPooling(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "RoIPool")
AddArgument(caffe_op, "order", "NCHW")
if is_test:
AddArgument(caffe_op, "is_test", is_test)
else:
# Only used for gradient computation
caffe_op.output.append(caffe_op.output[0] + '_argmaxes')
param = layer.roi_pooling_param
if param.HasField('pooled_h'):
AddArgument(caffe_op, 'pooled_h', param.pooled_h)
if param.HasField('pooled_w'):
AddArgument(caffe_op, 'pooled_w', param.pooled_w)
if param.HasField('spatial_scale'):
AddArgument(caffe_op, 'spatial_scale', param.spatial_scale)
return caffe_op, []
@TranslatorRegistry.Register("PReLU")
def TranslatePRelu(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "PRelu")
output = caffe_op.output[0]
caffe_op.input.extend([output + '_Slope'])
slope = utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_Slope')
return caffe_op, [slope]
@TranslatorRegistry.Register("Reduction")
def TranslateReduction(layer, pretrained_blobs, is_test, **kwargs):
param = layer.reduction_param
if param.operation == caffe_pb2.ReductionParameter.SUM:
caffe_op = BaseTranslate(layer, "ReduceBackSum")
elif param.operation == caffe_pb2.ReductionParameter.MEAN:
caffe_op = BaseTranslate(layer, "ReduceBackMean")
else:
raise NotImplementedError("Not yet supported")
if param.axis > 0:
# We can't figure out the number of dims to reduce from positive axis
# for back reduction since the shape info is not known here.
raise NotImplementedError("Not yet supported")
num_reduce_dim = -param.axis
AddArgument(caffe_op, "num_reduce_dim", num_reduce_dim)
return caffe_op, []
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Utilitity to convert pretrained caffe models to Caffe2 models.")
parser.add_argument("prototext", help="Caffe prototext.")
parser.add_argument("caffemodel", help="Caffe trained model.")
parser.add_argument("--init_net", help="Caffe2 initialization net.",
default="init_net.pb")
parser.add_argument("--predict_net", help="Caffe2 prediction net.",
default="predict_net.pb")
parser.add_argument("--remove_legacy_pad", help="Remove legacy pad \
(Only works for nets with one input blob)",
action="store_true",
default=False)
parser.add_argument("--input_dims", help="Dimension of input blob", nargs='+',
type=int, default=[])
args = parser.parse_args()
caffenet = caffe_pb2.NetParameter()
caffenet_pretrained = caffe_pb2.NetParameter()
input_proto = args.prototext
input_caffemodel = args.caffemodel
output_init_net = args.init_net
output_predict_net = args.predict_net
text_format.Merge(
open(input_proto, 'r').read(), caffenet
)
caffenet_pretrained.ParseFromString(
open(input_caffemodel, 'rb').read()
)
net, pretrained_params = TranslateModel(
caffenet, caffenet_pretrained, is_test=True,
remove_legacy_pad=args.remove_legacy_pad,
input_dims=args.input_dims
)
# Assume there is one input and one output
external_input = net.op[0].input[0]
external_output = net.op[-1].output[0]
net.external_input.extend([external_input])
net.external_input.extend([param.name for param in pretrained_params.protos])
net.external_output.extend([external_output])
init_net = ConvertTensorProtosToInitNet(pretrained_params, external_input)
with open(output_predict_net, 'wb') as f:
f.write(net.SerializeToString())
with open(output_predict_net + 'txt', 'w') as f:
f.write(str(net))
with open(output_init_net, 'wb') as f:
f.write(init_net.SerializeToString())
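    # Example invocation (file names are illustrative):
    #   python caffe_translator.py deploy.prototxt weights.caffemodel \
    #       --init_net init_net.pb --predict_net predict_net.pb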
| apache-2.0 | 2,941,573,585,728,819,700 | 36.455026 | 90 | 0.616217 | false |
Ormod/Diamond | src/collectors/interrupt/soft.py | 54 | 2024 | # coding=utf-8
"""
The SoftInterruptCollector collects metrics on software interrupts from
/proc/stat
#### Dependencies
* /proc/stat
"""
import platform
import os
import diamond.collector
# Detect the architecture of the system
# and set the counters for MAX_VALUES
# appropriately. Otherwise, rolling over
# counters will cause incorrect or
# negative values.
if platform.architecture()[0] == '64bit':
counter = (2 ** 64) - 1
else:
counter = (2 ** 32) - 1
class SoftInterruptCollector(diamond.collector.Collector):
PROC = '/proc/stat'
def get_default_config_help(self):
config_help = super(SoftInterruptCollector,
self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SoftInterruptCollector, self).get_default_config()
config.update({
'path': 'softirq'
})
return config
def collect(self):
"""
Collect interrupt data
"""
if not os.access(self.PROC, os.R_OK):
return False
# Open PROC file
file = open(self.PROC, 'r')
# Get data
for line in file:
if not line.startswith('softirq'):
continue
data = line.split()
metric_name = 'total'
metric_value = int(data[1])
metric_value = int(self.derivative(
metric_name,
long(metric_value), counter))
self.publish(metric_name, metric_value)
for i in range(2, len(data)):
metric_name = str(i - 2)
metric_value = int(data[i])
metric_value = int(self.derivative(
metric_name,
long(metric_value), counter))
self.publish(metric_name, metric_value)
# Close file
file.close()
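# Illustrative /proc/stat line this collector parses (values are made up):
#   softirq 229243852 69 121215715 238 4238242 0 150166 339714 0 1367906
# data[1] is published as 'total'; data[2:] are published as numbered metrics
# ('0', '1', ...), each run through derivative() against the counter maximum.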
| mit | -4,929,603,325,396,078,000 | 23.385542 | 73 | 0.553854 | false |
louietsai/python-for-android | python3-alpha/python3-src/Lib/idlelib/configHandler.py | 48 | 28730 | """Provides access to stored IDLE configuration information.
Refer to the comments at the beginning of config-main.def for a description of
the available configuration files and the design implemented to update user
configuration information. In particular, user configuration choices which
duplicate the defaults will be removed from the user's configuration files,
and if a file becomes empty, it will be deleted.
The contents of the user files may be altered using the Options/Configure IDLE
menu to access the configuration GUI (configDialog.py), or manually.
Throughout this module there is an emphasis on returning useable defaults
when a problem occurs in returning a requested configuration value back to
idle. This is to allow IDLE to continue to function in spite of errors in
the retrieval of config information. When a default is returned instead of
a requested config value, a message is printed to stderr to aid in
configuration problem notification and resolution.
"""
import os
import sys
from idlelib import macosxSupport
from configparser import ConfigParser, NoOptionError, NoSectionError
class InvalidConfigType(Exception): pass
class InvalidConfigSet(Exception): pass
class InvalidFgBg(Exception): pass
class InvalidTheme(Exception): pass
class IdleConfParser(ConfigParser):
"""
A ConfigParser specialised for idle configuration file handling
"""
def __init__(self, cfgFile, cfgDefaults=None):
"""
cfgFile - string, fully specified configuration file name
"""
self.file=cfgFile
ConfigParser.__init__(self,defaults=cfgDefaults)
def Get(self, section, option, type=None, default=None, raw=False):
"""
Get an option value for given section/option or return default.
If type is specified, return as type.
"""
if not self.has_option(section, option):
return default
if type=='bool':
return self.getboolean(section, option)
elif type=='int':
return self.getint(section, option)
else:
return self.get(section, option, raw=raw)
def GetOptionList(self,section):
"""
Get an option list for given section
"""
if self.has_section(section):
return self.options(section)
else: #return a default value
return []
def Load(self):
"""
Load the configuration file from disk
"""
self.read(self.file)
class IdleUserConfParser(IdleConfParser):
"""
IdleConfigParser specialised for user configuration handling.
"""
def AddSection(self,section):
"""
if section doesn't exist, add it
"""
if not self.has_section(section):
self.add_section(section)
def RemoveEmptySections(self):
"""
remove any sections that have no options
"""
for section in self.sections():
if not self.GetOptionList(section):
self.remove_section(section)
def IsEmpty(self):
"""
Remove empty sections and then return 1 if parser has no sections
left, else return 0.
"""
self.RemoveEmptySections()
if self.sections():
return 0
else:
return 1
def RemoveOption(self,section,option):
"""
If section/option exists, remove it.
Returns 1 if option was removed, 0 otherwise.
"""
if self.has_section(section):
return self.remove_option(section,option)
def SetOption(self,section,option,value):
"""
Sets option to value, adding section if required.
Returns 1 if option was added or changed, otherwise 0.
"""
if self.has_option(section,option):
if self.get(section,option)==value:
return 0
else:
self.set(section,option,value)
return 1
else:
if not self.has_section(section):
self.add_section(section)
self.set(section,option,value)
return 1
def RemoveFile(self):
"""
Removes the user config file from disk if it exists.
"""
if os.path.exists(self.file):
os.remove(self.file)
def Save(self):
"""Update user configuration file.
Remove empty sections. If resulting config isn't empty, write the file
to disk. If config is empty, remove the file from disk if it exists.
"""
if not self.IsEmpty():
fname = self.file
try:
cfgFile = open(fname, 'w')
except IOError:
os.unlink(fname)
cfgFile = open(fname, 'w')
self.write(cfgFile)
else:
self.RemoveFile()
class IdleConf:
"""
holds config parsers for all idle config files:
default config files
(idle install dir)/config-main.def
(idle install dir)/config-extensions.def
(idle install dir)/config-highlight.def
(idle install dir)/config-keys.def
user config files
(user home dir)/.idlerc/config-main.cfg
(user home dir)/.idlerc/config-extensions.cfg
(user home dir)/.idlerc/config-highlight.cfg
(user home dir)/.idlerc/config-keys.cfg
"""
def __init__(self):
self.defaultCfg={}
self.userCfg={}
self.cfg={}
self.CreateConfigHandlers()
self.LoadCfgFiles()
#self.LoadCfg()
def CreateConfigHandlers(self):
"""
set up a dictionary of config parsers for default and user
configurations respectively
"""
#build idle install path
if __name__ != '__main__': # we were imported
idleDir=os.path.dirname(__file__)
else: # we were exec'ed (for testing only)
idleDir=os.path.abspath(sys.path[0])
userDir=self.GetUserCfgDir()
configTypes=('main','extensions','highlight','keys')
defCfgFiles={}
usrCfgFiles={}
for cfgType in configTypes: #build config file names
defCfgFiles[cfgType]=os.path.join(idleDir,'config-'+cfgType+'.def')
usrCfgFiles[cfgType]=os.path.join(userDir,'config-'+cfgType+'.cfg')
for cfgType in configTypes: #create config parsers
self.defaultCfg[cfgType]=IdleConfParser(defCfgFiles[cfgType])
self.userCfg[cfgType]=IdleUserConfParser(usrCfgFiles[cfgType])
def GetUserCfgDir(self):
"""
Creates (if required) and returns a filesystem directory for storing
user config files.
"""
cfgDir = '.idlerc'
userDir = os.path.expanduser('~')
if userDir != '~': # expanduser() found user home dir
if not os.path.exists(userDir):
warn = ('\n Warning: os.path.expanduser("~") points to\n '+
userDir+',\n but the path does not exist.\n')
try:
sys.stderr.write(warn)
except IOError:
pass
userDir = '~'
if userDir == "~": # still no path to home!
# traditionally IDLE has defaulted to os.getcwd(), is this adequate?
userDir = os.getcwd()
userDir = os.path.join(userDir, cfgDir)
if not os.path.exists(userDir):
try:
os.mkdir(userDir)
except (OSError, IOError):
warn = ('\n Warning: unable to create user config directory\n'+
userDir+'\n Check path and permissions.\n Exiting!\n\n')
sys.stderr.write(warn)
raise SystemExit
return userDir
def GetOption(self, configType, section, option, default=None, type=None,
warn_on_default=True, raw=False):
"""
Get an option value for given config type and given general
configuration section/option or return a default. If type is specified,
return as type. Firstly the user configuration is checked, with a
fallback to the default configuration, and a final 'catch all'
fallback to a useable passed-in default if the option isn't present in
either the user or the default configuration.
configType must be one of ('main','extensions','highlight','keys')
If a default is returned, and warn_on_default is True, a warning is
printed to stderr.
"""
if self.userCfg[configType].has_option(section,option):
return self.userCfg[configType].Get(section, option,
type=type, raw=raw)
elif self.defaultCfg[configType].has_option(section,option):
return self.defaultCfg[configType].Get(section, option,
type=type, raw=raw)
else: #returning default, print warning
if warn_on_default:
warning = ('\n Warning: configHandler.py - IdleConf.GetOption -\n'
' problem retrieving configuration option %r\n'
' from section %r.\n'
' returning default value: %r\n' %
(option, section, default))
try:
sys.stderr.write(warning)
except IOError:
pass
return default
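    # Illustrative call (the option names are examples, not guaranteed to be
    # present in every configuration):
    #   width = idleConf.GetOption('main', 'EditorWindow', 'width',
    #                              type='int', default=80)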
def SetOption(self, configType, section, option, value):
"""In user's config file, set section's option to value.
"""
self.userCfg[configType].SetOption(section, option, value)
def GetSectionList(self, configSet, configType):
"""
Get a list of sections from either the user or default config for
the given config type.
configSet must be either 'user' or 'default'
configType must be one of ('main','extensions','highlight','keys')
"""
if not (configType in ('main','extensions','highlight','keys')):
raise InvalidConfigType('Invalid configType specified')
if configSet == 'user':
cfgParser=self.userCfg[configType]
elif configSet == 'default':
cfgParser=self.defaultCfg[configType]
else:
raise InvalidConfigSet('Invalid configSet specified')
return cfgParser.sections()
def GetHighlight(self, theme, element, fgBg=None):
"""
return individual highlighting theme elements.
fgBg - string ('fg'or'bg') or None, if None return a dictionary
containing fg and bg colours (appropriate for passing to Tkinter in,
e.g., a tag_config call), otherwise fg or bg colour only as specified.
"""
if self.defaultCfg['highlight'].has_section(theme):
themeDict=self.GetThemeDict('default',theme)
else:
themeDict=self.GetThemeDict('user',theme)
fore=themeDict[element+'-foreground']
if element=='cursor': #there is no config value for cursor bg
back=themeDict['normal-background']
else:
back=themeDict[element+'-background']
highlight={"foreground": fore,"background": back}
if not fgBg: #return dict of both colours
return highlight
else: #return specified colour only
if fgBg == 'fg':
return highlight["foreground"]
if fgBg == 'bg':
return highlight["background"]
else:
raise InvalidFgBg('Invalid fgBg specified')
def GetThemeDict(self,type,themeName):
"""
type - string, 'default' or 'user' theme type
themeName - string, theme name
Returns a dictionary which holds {option:value} for each element
in the specified theme. Values are loaded over a set of ultimate last
fallback defaults to guarantee that all theme elements are present in
a newly created theme.
"""
if type == 'user':
cfgParser=self.userCfg['highlight']
elif type == 'default':
cfgParser=self.defaultCfg['highlight']
else:
raise InvalidTheme('Invalid theme type specified')
        #foreground and background values are provided for each theme element
#(apart from cursor) even though all these values are not yet used
#by idle, to allow for their use in the future. Default values are
#generally black and white.
theme={ 'normal-foreground':'#000000',
'normal-background':'#ffffff',
'keyword-foreground':'#000000',
'keyword-background':'#ffffff',
'builtin-foreground':'#000000',
'builtin-background':'#ffffff',
'comment-foreground':'#000000',
'comment-background':'#ffffff',
'string-foreground':'#000000',
'string-background':'#ffffff',
'definition-foreground':'#000000',
'definition-background':'#ffffff',
'hilite-foreground':'#000000',
'hilite-background':'gray',
'break-foreground':'#ffffff',
'break-background':'#000000',
'hit-foreground':'#ffffff',
'hit-background':'#000000',
'error-foreground':'#ffffff',
'error-background':'#000000',
#cursor (only foreground can be set)
'cursor-foreground':'#000000',
#shell window
'stdout-foreground':'#000000',
'stdout-background':'#ffffff',
'stderr-foreground':'#000000',
'stderr-background':'#ffffff',
'console-foreground':'#000000',
'console-background':'#ffffff' }
for element in theme:
if not cfgParser.has_option(themeName,element):
#we are going to return a default, print warning
warning=('\n Warning: configHandler.py - IdleConf.GetThemeDict'
' -\n problem retrieving theme element %r'
'\n from theme %r.\n'
' returning default value: %r\n' %
(element, themeName, theme[element]))
try:
sys.stderr.write(warning)
except IOError:
pass
colour=cfgParser.Get(themeName,element,default=theme[element])
theme[element]=colour
return theme
def CurrentTheme(self):
"""
Returns the name of the currently active theme
"""
return self.GetOption('main','Theme','name',default='')
def CurrentKeys(self):
"""
Returns the name of the currently active key set
"""
return self.GetOption('main','Keys','name',default='')
def GetExtensions(self, active_only=True, editor_only=False, shell_only=False):
"""
Gets a list of all idle extensions declared in the config files.
active_only - boolean, if true only return active (enabled) extensions
"""
extns=self.RemoveKeyBindNames(
self.GetSectionList('default','extensions'))
userExtns=self.RemoveKeyBindNames(
self.GetSectionList('user','extensions'))
for extn in userExtns:
if extn not in extns: #user has added own extension
extns.append(extn)
if active_only:
activeExtns=[]
for extn in extns:
if self.GetOption('extensions', extn, 'enable', default=True,
type='bool'):
#the extension is enabled
if editor_only or shell_only:
if editor_only:
option = "enable_editor"
else:
option = "enable_shell"
if self.GetOption('extensions', extn,option,
default=True, type='bool',
warn_on_default=False):
activeExtns.append(extn)
else:
activeExtns.append(extn)
return activeExtns
else:
return extns
def RemoveKeyBindNames(self,extnNameList):
#get rid of keybinding section names
names=extnNameList
kbNameIndicies=[]
for name in names:
if name.endswith(('_bindings', '_cfgBindings')):
kbNameIndicies.append(names.index(name))
kbNameIndicies.sort()
kbNameIndicies.reverse()
for index in kbNameIndicies: #delete each keybinding section name
del(names[index])
return names
def GetExtnNameForEvent(self,virtualEvent):
"""
Returns the name of the extension that virtualEvent is bound in, or
None if not bound in any extension.
virtualEvent - string, name of the virtual event to test for, without
the enclosing '<< >>'
"""
extName=None
vEvent='<<'+virtualEvent+'>>'
for extn in self.GetExtensions(active_only=0):
for event in self.GetExtensionKeys(extn):
if event == vEvent:
extName=extn
return extName
def GetExtensionKeys(self,extensionName):
"""
returns a dictionary of the configurable keybindings for a particular
        extension, as they exist in the dictionary returned by GetCurrentKeySet;
that is, where previously used bindings are disabled.
"""
keysName=extensionName+'_cfgBindings'
activeKeys=self.GetCurrentKeySet()
extKeys={}
if self.defaultCfg['extensions'].has_section(keysName):
eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
for eventName in eventNames:
event='<<'+eventName+'>>'
binding=activeKeys[event]
extKeys[event]=binding
return extKeys
def __GetRawExtensionKeys(self,extensionName):
"""
returns a dictionary of the configurable keybindings for a particular
extension, as defined in the configuration files, or an empty dictionary
if no bindings are found
"""
keysName=extensionName+'_cfgBindings'
extKeys={}
if self.defaultCfg['extensions'].has_section(keysName):
eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
for eventName in eventNames:
binding=self.GetOption('extensions',keysName,
eventName,default='').split()
event='<<'+eventName+'>>'
extKeys[event]=binding
return extKeys
def GetExtensionBindings(self,extensionName):
"""
Returns a dictionary of all the event bindings for a particular
extension. The configurable keybindings are returned as they exist in
the dictionary returned by GetCurrentKeySet; that is, where re-used
keybindings are disabled.
"""
bindsName=extensionName+'_bindings'
extBinds=self.GetExtensionKeys(extensionName)
#add the non-configurable bindings
if self.defaultCfg['extensions'].has_section(bindsName):
eventNames=self.defaultCfg['extensions'].GetOptionList(bindsName)
for eventName in eventNames:
binding=self.GetOption('extensions',bindsName,
eventName,default='').split()
event='<<'+eventName+'>>'
extBinds[event]=binding
return extBinds
def GetKeyBinding(self, keySetName, eventStr):
"""
returns the keybinding for a specific event.
keySetName - string, name of key binding set
eventStr - string, the virtual event we want the binding for,
represented as a string, eg. '<<event>>'
"""
eventName=eventStr[2:-2] #trim off the angle brackets
binding=self.GetOption('keys',keySetName,eventName,default='').split()
return binding
def GetCurrentKeySet(self):
result = self.GetKeySet(self.CurrentKeys())
if macosxSupport.runningAsOSXApp():
# We're using AquaTk, replace all keybingings that use the
# Alt key by ones that use the Option key because the former
# don't work reliably.
for k, v in result.items():
v2 = [ x.replace('<Alt-', '<Option-') for x in v ]
if v != v2:
result[k] = v2
return result
def GetKeySet(self,keySetName):
"""
Returns a dictionary of: all requested core keybindings, plus the
keybindings for all currently active extensions. If a binding defined
in an extension is already in use, that binding is disabled.
"""
keySet=self.GetCoreKeys(keySetName)
activeExtns=self.GetExtensions(active_only=1)
for extn in activeExtns:
extKeys=self.__GetRawExtensionKeys(extn)
if extKeys: #the extension defines keybindings
for event in extKeys:
if extKeys[event] in keySet.values():
#the binding is already in use
extKeys[event]='' #disable this binding
keySet[event]=extKeys[event] #add binding
return keySet
def IsCoreBinding(self,virtualEvent):
"""
returns true if the virtual event is bound in the core idle keybindings.
virtualEvent - string, name of the virtual event to test for, without
the enclosing '<< >>'
"""
return ('<<'+virtualEvent+'>>') in self.GetCoreKeys()
def GetCoreKeys(self, keySetName=None):
"""
returns the requested set of core keybindings, with fallbacks if
required.
Keybindings loaded from the config file(s) are loaded _over_ these
defaults, so if there is a problem getting any core binding there will
be an 'ultimate last resort fallback' to the CUA-ish bindings
defined here.
"""
keyBindings={
'<<copy>>': ['<Control-c>', '<Control-C>'],
'<<cut>>': ['<Control-x>', '<Control-X>'],
'<<paste>>': ['<Control-v>', '<Control-V>'],
'<<beginning-of-line>>': ['<Control-a>', '<Home>'],
'<<center-insert>>': ['<Control-l>'],
'<<close-all-windows>>': ['<Control-q>'],
'<<close-window>>': ['<Alt-F4>'],
'<<do-nothing>>': ['<Control-x>'],
'<<end-of-file>>': ['<Control-d>'],
'<<python-docs>>': ['<F1>'],
'<<python-context-help>>': ['<Shift-F1>'],
'<<history-next>>': ['<Alt-n>'],
'<<history-previous>>': ['<Alt-p>'],
'<<interrupt-execution>>': ['<Control-c>'],
'<<view-restart>>': ['<F6>'],
'<<restart-shell>>': ['<Control-F6>'],
'<<open-class-browser>>': ['<Alt-c>'],
'<<open-module>>': ['<Alt-m>'],
'<<open-new-window>>': ['<Control-n>'],
'<<open-window-from-file>>': ['<Control-o>'],
'<<plain-newline-and-indent>>': ['<Control-j>'],
'<<print-window>>': ['<Control-p>'],
'<<redo>>': ['<Control-y>'],
'<<remove-selection>>': ['<Escape>'],
'<<save-copy-of-window-as-file>>': ['<Alt-Shift-S>'],
'<<save-window-as-file>>': ['<Alt-s>'],
'<<save-window>>': ['<Control-s>'],
'<<select-all>>': ['<Alt-a>'],
'<<toggle-auto-coloring>>': ['<Control-slash>'],
'<<undo>>': ['<Control-z>'],
'<<find-again>>': ['<Control-g>', '<F3>'],
'<<find-in-files>>': ['<Alt-F3>'],
'<<find-selection>>': ['<Control-F3>'],
'<<find>>': ['<Control-f>'],
'<<replace>>': ['<Control-h>'],
'<<goto-line>>': ['<Alt-g>'],
'<<smart-backspace>>': ['<Key-BackSpace>'],
'<<newline-and-indent>>': ['<Key-Return> <Key-KP_Enter>'],
'<<smart-indent>>': ['<Key-Tab>'],
'<<indent-region>>': ['<Control-Key-bracketright>'],
'<<dedent-region>>': ['<Control-Key-bracketleft>'],
'<<comment-region>>': ['<Alt-Key-3>'],
'<<uncomment-region>>': ['<Alt-Key-4>'],
'<<tabify-region>>': ['<Alt-Key-5>'],
'<<untabify-region>>': ['<Alt-Key-6>'],
'<<toggle-tabs>>': ['<Alt-Key-t>'],
'<<change-indentwidth>>': ['<Alt-Key-u>'],
'<<del-word-left>>': ['<Control-Key-BackSpace>'],
'<<del-word-right>>': ['<Control-Key-Delete>']
}
if keySetName:
for event in keyBindings:
binding=self.GetKeyBinding(keySetName,event)
if binding:
keyBindings[event]=binding
else: #we are going to return a default, print warning
warning=('\n Warning: configHandler.py - IdleConf.GetCoreKeys'
' -\n problem retrieving key binding for event %r'
'\n from key set %r.\n'
' returning default value: %r\n' %
(event, keySetName, keyBindings[event]))
try:
sys.stderr.write(warning)
except IOError:
pass
return keyBindings
def GetExtraHelpSourceList(self,configSet):
"""Fetch list of extra help sources from a given configSet.
Valid configSets are 'user' or 'default'. Return a list of tuples of
the form (menu_item , path_to_help_file , option), or return the empty
list. 'option' is the sequence number of the help resource. 'option'
values determine the position of the menu items on the Help menu,
therefore the returned list must be sorted by 'option'.
"""
helpSources=[]
if configSet=='user':
cfgParser=self.userCfg['main']
elif configSet=='default':
cfgParser=self.defaultCfg['main']
else:
raise InvalidConfigSet('Invalid configSet specified')
options=cfgParser.GetOptionList('HelpFiles')
for option in options:
value=cfgParser.Get('HelpFiles',option,default=';')
if value.find(';')==-1: #malformed config entry with no ';'
menuItem='' #make these empty
helpPath='' #so value won't be added to list
else: #config entry contains ';' as expected
value=value.split(';')
menuItem=value[0].strip()
helpPath=value[1].strip()
if menuItem and helpPath: #neither are empty strings
helpSources.append( (menuItem,helpPath,option) )
helpSources.sort(key=lambda x: x[2])
return helpSources
def GetAllExtraHelpSourcesList(self):
"""
Returns a list of tuples containing the details of all additional help
sources configured, or an empty list if there are none. Tuples are of
the format returned by GetExtraHelpSourceList.
"""
allHelpSources=( self.GetExtraHelpSourceList('default')+
self.GetExtraHelpSourceList('user') )
return allHelpSources
def LoadCfgFiles(self):
"""
load all configuration files.
"""
for key in self.defaultCfg:
self.defaultCfg[key].Load()
self.userCfg[key].Load() #same keys
def SaveUserCfgFiles(self):
"""
write all loaded user configuration files back to disk
"""
for key in self.userCfg:
self.userCfg[key].Save()
idleConf=IdleConf()
### module test
if __name__ == '__main__':
def dumpCfg(cfg):
print('\n',cfg,'\n')
for key in cfg:
sections=cfg[key].sections()
print(key)
print(sections)
for section in sections:
options=cfg[key].options(section)
print(section)
print(options)
for option in options:
print(option, '=', cfg[key].Get(section,option))
dumpCfg(idleConf.defaultCfg)
dumpCfg(idleConf.userCfg)
print(idleConf.userCfg['main'].Get('Theme','name'))
#print idleConf.userCfg['highlight'].GetDefHighlight('Foo','normal')
| apache-2.0 | -7,320,093,801,106,819,000 | 39.809659 | 83 | 0.563174 | false |
ASlave2Audio/Restaurant-App | mingw/bin/lib/lib2to3/pytree.py | 325 | 29039 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""
Python parse tree definitions.
This is a very concrete parse tree; we need to keep every token and
even the comments and whitespace between tokens.
There's also a pattern matching implementation here.
"""
__author__ = "Guido van Rossum <[email protected]>"
import sys
import warnings
from StringIO import StringIO
HUGE = 0x7FFFFFFF # maximum repeat count, default max
_type_reprs = {}
def type_repr(type_num):
global _type_reprs
if not _type_reprs:
from .pygram import python_symbols
# printing tokens is possible but not as useful
# from .pgen2 import token // token.__dict__.items():
for name, val in python_symbols.__dict__.items():
if type(val) == int: _type_reprs[val] = name
return _type_reprs.setdefault(type_num, type_num)
class Base(object):
"""
Abstract base class for Node and Leaf.
This provides some default functionality and boilerplate using the
template pattern.
A node may be a subnode of at most one parent.
"""
# Default values for instance variables
type = None # int: token number (< 256) or symbol number (>= 256)
parent = None # Parent node pointer, or None
children = () # Tuple of subnodes
was_changed = False
was_checked = False
def __new__(cls, *args, **kwds):
"""Constructor that prevents Base from being instantiated."""
assert cls is not Base, "Cannot instantiate Base"
return object.__new__(cls)
def __eq__(self, other):
"""
Compare two nodes for equality.
This calls the method _eq().
"""
if self.__class__ is not other.__class__:
return NotImplemented
return self._eq(other)
__hash__ = None # For Py3 compatibility.
def __ne__(self, other):
"""
Compare two nodes for inequality.
This calls the method _eq().
"""
if self.__class__ is not other.__class__:
return NotImplemented
return not self._eq(other)
def _eq(self, other):
"""
Compare two nodes for equality.
This is called by __eq__ and __ne__. It is only called if the two nodes
have the same type. This must be implemented by the concrete subclass.
Nodes should be considered equal if they have the same structure,
ignoring the prefix string and other context information.
"""
raise NotImplementedError
def clone(self):
"""
Return a cloned (deep) copy of self.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def post_order(self):
"""
Return a post-order iterator for the tree.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def pre_order(self):
"""
Return a pre-order iterator for the tree.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def set_prefix(self, prefix):
"""
Set the prefix for the node (see Leaf class).
DEPRECATED; use the prefix property directly.
"""
warnings.warn("set_prefix() is deprecated; use the prefix property",
DeprecationWarning, stacklevel=2)
self.prefix = prefix
def get_prefix(self):
"""
Return the prefix for the node (see Leaf class).
DEPRECATED; use the prefix property directly.
"""
warnings.warn("get_prefix() is deprecated; use the prefix property",
DeprecationWarning, stacklevel=2)
return self.prefix
def replace(self, new):
"""Replace this node with a new one in the parent."""
assert self.parent is not None, str(self)
assert new is not None
if not isinstance(new, list):
new = [new]
l_children = []
found = False
for ch in self.parent.children:
if ch is self:
assert not found, (self.parent.children, self, new)
if new is not None:
l_children.extend(new)
found = True
else:
l_children.append(ch)
assert found, (self.children, self, new)
self.parent.changed()
self.parent.children = l_children
for x in new:
x.parent = self.parent
self.parent = None
def get_lineno(self):
"""Return the line number which generated the invocant node."""
node = self
while not isinstance(node, Leaf):
if not node.children:
return
node = node.children[0]
return node.lineno
def changed(self):
if self.parent:
self.parent.changed()
self.was_changed = True
def remove(self):
"""
Remove the node from the tree. Returns the position of the node in its
parent's children before it was removed.
"""
if self.parent:
for i, node in enumerate(self.parent.children):
if node is self:
self.parent.changed()
del self.parent.children[i]
self.parent = None
return i
@property
def next_sibling(self):
"""
The node immediately following the invocant in their parent's children
list. If the invocant does not have a next sibling, it is None
"""
if self.parent is None:
return None
# Can't use index(); we need to test by identity
for i, child in enumerate(self.parent.children):
if child is self:
try:
return self.parent.children[i+1]
except IndexError:
return None
@property
def prev_sibling(self):
"""
The node immediately preceding the invocant in their parent's children
list. If the invocant does not have a previous sibling, it is None.
"""
if self.parent is None:
return None
# Can't use index(); we need to test by identity
for i, child in enumerate(self.parent.children):
if child is self:
if i == 0:
return None
return self.parent.children[i-1]
def leaves(self):
for child in self.children:
for x in child.leaves():
yield x
def depth(self):
if self.parent is None:
return 0
return 1 + self.parent.depth()
def get_suffix(self):
"""
Return the string immediately following the invocant node. This is
effectively equivalent to node.next_sibling.prefix
"""
next_sib = self.next_sibling
if next_sib is None:
return u""
return next_sib.prefix
if sys.version_info < (3, 0):
def __str__(self):
return unicode(self).encode("ascii")
class Node(Base):
"""Concrete implementation for interior nodes."""
def __init__(self,type, children,
context=None,
prefix=None,
fixers_applied=None):
"""
Initializer.
Takes a type constant (a symbol number >= 256), a sequence of
child nodes, and an optional context keyword argument.
As a side effect, the parent pointers of the children are updated.
"""
assert type >= 256, type
self.type = type
self.children = list(children)
for ch in self.children:
assert ch.parent is None, repr(ch)
ch.parent = self
if prefix is not None:
self.prefix = prefix
if fixers_applied:
self.fixers_applied = fixers_applied[:]
else:
self.fixers_applied = None
def __repr__(self):
"""Return a canonical string representation."""
return "%s(%s, %r)" % (self.__class__.__name__,
type_repr(self.type),
self.children)
def __unicode__(self):
"""
Return a pretty string representation.
This reproduces the input source exactly.
"""
return u"".join(map(unicode, self.children))
if sys.version_info > (3, 0):
__str__ = __unicode__
def _eq(self, other):
"""Compare two nodes for equality."""
return (self.type, self.children) == (other.type, other.children)
def clone(self):
"""Return a cloned (deep) copy of self."""
return Node(self.type, [ch.clone() for ch in self.children],
fixers_applied=self.fixers_applied)
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
for node in child.post_order():
yield node
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
for node in child.pre_order():
yield node
def _prefix_getter(self):
"""
The whitespace and comments preceding this node in the input.
"""
if not self.children:
return ""
return self.children[0].prefix
def _prefix_setter(self, prefix):
if self.children:
self.children[0].prefix = prefix
prefix = property(_prefix_getter, _prefix_setter)
def set_child(self, i, child):
"""
Equivalent to 'node.children[i] = child'. This method also sets the
child's parent attribute appropriately.
"""
child.parent = self
self.children[i].parent = None
self.children[i] = child
self.changed()
def insert_child(self, i, child):
"""
Equivalent to 'node.children.insert(i, child)'. This method also sets
the child's parent attribute appropriately.
"""
child.parent = self
self.children.insert(i, child)
self.changed()
def append_child(self, child):
"""
Equivalent to 'node.children.append(child)'. This method also sets the
child's parent attribute appropriately.
"""
child.parent = self
self.children.append(child)
self.changed()
class Leaf(Base):
"""Concrete implementation for leaf nodes."""
# Default values for instance variables
_prefix = "" # Whitespace and comments preceding this token in the input
lineno = 0 # Line where this token starts in the input
    column = 0  # Column where this token starts in the input
def __init__(self, type, value,
context=None,
prefix=None,
fixers_applied=[]):
"""
Initializer.
Takes a type constant (a token number < 256), a string value, and an
optional context keyword argument.
"""
assert 0 <= type < 256, type
if context is not None:
self._prefix, (self.lineno, self.column) = context
self.type = type
self.value = value
if prefix is not None:
self._prefix = prefix
self.fixers_applied = fixers_applied[:]
def __repr__(self):
"""Return a canonical string representation."""
return "%s(%r, %r)" % (self.__class__.__name__,
self.type,
self.value)
def __unicode__(self):
"""
Return a pretty string representation.
This reproduces the input source exactly.
"""
return self.prefix + unicode(self.value)
if sys.version_info > (3, 0):
__str__ = __unicode__
def _eq(self, other):
"""Compare two nodes for equality."""
return (self.type, self.value) == (other.type, other.value)
def clone(self):
"""Return a cloned (deep) copy of self."""
return Leaf(self.type, self.value,
(self.prefix, (self.lineno, self.column)),
fixers_applied=self.fixers_applied)
def leaves(self):
yield self
def post_order(self):
"""Return a post-order iterator for the tree."""
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
def _prefix_getter(self):
"""
The whitespace and comments preceding this token in the input.
"""
return self._prefix
def _prefix_setter(self, prefix):
self.changed()
self._prefix = prefix
prefix = property(_prefix_getter, _prefix_setter)
def convert(gr, raw_node):
"""
Convert raw node information to a Node or Leaf instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
type, value, context, children = raw_node
if children or type in gr.number2symbol:
# If there's exactly one child, return that child instead of
# creating a new node.
if len(children) == 1:
return children[0]
return Node(type, children, context=context)
else:
return Leaf(type, value, context=context)
class BasePattern(object):
"""
A pattern is a tree matching pattern.
It looks for a specific node type (token or symbol), and
optionally for a specific content.
This is an abstract base class. There are three concrete
subclasses:
- LeafPattern matches a single leaf node;
- NodePattern matches a single node (usually non-leaf);
- WildcardPattern matches a sequence of nodes of variable length.
"""
# Defaults for instance variables
type = None # Node type (token if < 256, symbol if >= 256)
content = None # Optional content matching pattern
name = None # Optional name used to store match in results dict
def __new__(cls, *args, **kwds):
"""Constructor that prevents BasePattern from being instantiated."""
assert cls is not BasePattern, "Cannot instantiate BasePattern"
return object.__new__(cls)
def __repr__(self):
args = [type_repr(self.type), self.content, self.name]
while args and args[-1] is None:
del args[-1]
return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))
def optimize(self):
"""
A subclass can define this as a hook for optimizations.
Returns either self or another node with the same effect.
"""
return self
def match(self, node, results=None):
"""
Does this pattern exactly match a node?
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
Default implementation for non-wildcard patterns.
"""
if self.type is not None and node.type != self.type:
return False
if self.content is not None:
r = None
if results is not None:
r = {}
if not self._submatch(node, r):
return False
if r:
results.update(r)
if results is not None and self.name:
results[self.name] = node
return True
def match_seq(self, nodes, results=None):
"""
Does this pattern exactly match a sequence of nodes?
Default implementation for non-wildcard patterns.
"""
if len(nodes) != 1:
return False
return self.match(nodes[0], results)
def generate_matches(self, nodes):
"""
Generator yielding all matches for this pattern.
Default implementation for non-wildcard patterns.
"""
r = {}
if nodes and self.match(nodes[0], r):
yield 1, r
class LeafPattern(BasePattern):
def __init__(self, type=None, content=None, name=None):
"""
Initializer. Takes optional type, content, and name.
The type, if given must be a token type (< 256). If not given,
this matches any *leaf* node; the content may still be required.
The content, if given, must be a string.
If a name is given, the matching node is stored in the results
dict under that key.
"""
if type is not None:
assert 0 <= type < 256, type
if content is not None:
assert isinstance(content, basestring), repr(content)
self.type = type
self.content = content
self.name = name
def match(self, node, results=None):
"""Override match() to insist on a leaf node."""
if not isinstance(node, Leaf):
return False
return BasePattern.match(self, node, results)
def _submatch(self, node, results=None):
"""
Match the pattern's content to the node's children.
This assumes the node type matches and self.content is not None.
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
When returning False, the results dict may still be updated.
"""
return self.content == node.value
class NodePattern(BasePattern):
wildcards = False
def __init__(self, type=None, content=None, name=None):
"""
Initializer. Takes optional type, content, and name.
The type, if given, must be a symbol type (>= 256). If the
type is None this matches *any* single node (leaf or not),
except if content is not None, in which it only matches
non-leaf nodes that also match the content pattern.
The content, if not None, must be a sequence of Patterns that
must match the node's children exactly. If the content is
given, the type must not be None.
If a name is given, the matching node is stored in the results
dict under that key.
"""
if type is not None:
assert type >= 256, type
if content is not None:
assert not isinstance(content, basestring), repr(content)
content = list(content)
for i, item in enumerate(content):
assert isinstance(item, BasePattern), (i, item)
if isinstance(item, WildcardPattern):
self.wildcards = True
self.type = type
self.content = content
self.name = name
def _submatch(self, node, results=None):
"""
Match the pattern's content to the node's children.
This assumes the node type matches and self.content is not None.
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
When returning False, the results dict may still be updated.
"""
if self.wildcards:
for c, r in generate_matches(self.content, node.children):
if c == len(node.children):
if results is not None:
results.update(r)
return True
return False
if len(self.content) != len(node.children):
return False
for subpattern, child in zip(self.content, node.children):
if not subpattern.match(child, results):
return False
return True
class WildcardPattern(BasePattern):
"""
A wildcard pattern can match zero or more nodes.
This has all the flexibility needed to implement patterns like:
.* .+ .? .{m,n}
(a b c | d e | f)
(...)* (...)+ (...)? (...){m,n}
except it always uses non-greedy matching.
"""
def __init__(self, content=None, min=0, max=HUGE, name=None):
"""
Initializer.
Args:
content: optional sequence of subsequences of patterns;
if absent, matches one node;
if present, each subsequence is an alternative [*]
min: optional minimum number of times to match, default 0
max: optional maximum number of times to match, default HUGE
name: optional name assigned to this match
[*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
equivalent to (a b c | d e | f g h); if content is None,
this is equivalent to '.' in regular expression terms.
The min and max parameters work as follows:
min=0, max=maxint: .*
min=1, max=maxint: .+
min=0, max=1: .?
min=1, max=1: .
If content is not None, replace the dot with the parenthesized
list of alternatives, e.g. (a b c | d e | f g h)*
"""
assert 0 <= min <= max <= HUGE, (min, max)
if content is not None:
content = tuple(map(tuple, content)) # Protect against alterations
# Check sanity of alternatives
assert len(content), repr(content) # Can't have zero alternatives
for alt in content:
                assert len(alt), repr(alt) # Can't have empty alternatives
self.content = content
self.min = min
self.max = max
self.name = name
def optimize(self):
"""Optimize certain stacked wildcard patterns."""
subpattern = None
if (self.content is not None and
len(self.content) == 1 and len(self.content[0]) == 1):
subpattern = self.content[0][0]
if self.min == 1 and self.max == 1:
if self.content is None:
return NodePattern(name=self.name)
if subpattern is not None and self.name == subpattern.name:
return subpattern.optimize()
if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
subpattern.min <= 1 and self.name == subpattern.name):
return WildcardPattern(subpattern.content,
self.min*subpattern.min,
self.max*subpattern.max,
subpattern.name)
return self
def match(self, node, results=None):
"""Does this pattern exactly match a node?"""
return self.match_seq([node], results)
def match_seq(self, nodes, results=None):
"""Does this pattern exactly match a sequence of nodes?"""
for c, r in self.generate_matches(nodes):
if c == len(nodes):
if results is not None:
results.update(r)
if self.name:
results[self.name] = list(nodes)
return True
return False
def generate_matches(self, nodes):
"""
Generator yielding matches for a sequence of nodes.
Args:
nodes: sequence of nodes
Yields:
(count, results) tuples where:
count: the match comprises nodes[:count];
results: dict containing named submatches.
"""
if self.content is None:
# Shortcut for special case (see __init__.__doc__)
for count in xrange(self.min, 1 + min(len(nodes), self.max)):
r = {}
if self.name:
r[self.name] = nodes[:count]
yield count, r
elif self.name == "bare_name":
yield self._bare_name_matches(nodes)
else:
# The reason for this is that hitting the recursion limit usually
# results in some ugly messages about how RuntimeErrors are being
# ignored. We don't do this on non-CPython implementation because
# they don't have this problem.
if hasattr(sys, "getrefcount"):
save_stderr = sys.stderr
sys.stderr = StringIO()
try:
for count, r in self._recursive_matches(nodes, 0):
if self.name:
r[self.name] = nodes[:count]
yield count, r
except RuntimeError:
# We fall back to the iterative pattern matching scheme if the recursive
# scheme hits the recursion limit.
for count, r in self._iterative_matches(nodes):
if self.name:
r[self.name] = nodes[:count]
yield count, r
finally:
if hasattr(sys, "getrefcount"):
sys.stderr = save_stderr
def _iterative_matches(self, nodes):
"""Helper to iteratively yield the matches."""
nodelen = len(nodes)
if 0 >= self.min:
yield 0, {}
results = []
# generate matches that use just one alt from self.content
for alt in self.content:
for c, r in generate_matches(alt, nodes):
yield c, r
results.append((c, r))
# for each match, iterate down the nodes
while results:
new_results = []
for c0, r0 in results:
# stop if the entire set of nodes has been matched
if c0 < nodelen and c0 <= self.max:
for alt in self.content:
for c1, r1 in generate_matches(alt, nodes[c0:]):
if c1 > 0:
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
new_results.append((c0 + c1, r))
results = new_results
def _bare_name_matches(self, nodes):
"""Special optimized matcher for bare_name."""
count = 0
r = {}
done = False
max = len(nodes)
while not done and count < max:
done = True
for leaf in self.content:
if leaf[0].match(nodes[count], r):
count += 1
done = False
break
r[self.name] = nodes[:count]
return count, r
def _recursive_matches(self, nodes, count):
"""Helper to recursively yield the matches."""
assert self.content is not None
if count >= self.min:
yield 0, {}
if count < self.max:
for alt in self.content:
for c0, r0 in generate_matches(alt, nodes):
for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
class NegatedPattern(BasePattern):
def __init__(self, content=None):
"""
Initializer.
The argument is either a pattern or None. If it is None, this
only matches an empty sequence (effectively '$' in regex
lingo). If it is not None, this matches whenever the argument
pattern doesn't have any matches.
"""
if content is not None:
assert isinstance(content, BasePattern), repr(content)
self.content = content
def match(self, node):
# We never match a node in its entirety
return False
def match_seq(self, nodes):
# We only match an empty sequence of nodes in its entirety
return len(nodes) == 0
def generate_matches(self, nodes):
if self.content is None:
# Return a match if there is an empty sequence
if len(nodes) == 0:
yield 0, {}
else:
# Return a match if the argument pattern has no matches
for c, r in self.content.generate_matches(nodes):
return
yield 0, {}
def generate_matches(patterns, nodes):
"""
Generator yielding matches for a sequence of patterns and nodes.
Args:
patterns: a sequence of patterns
nodes: a sequence of nodes
Yields:
(count, results) tuples where:
count: the entire sequence of patterns matches nodes[:count];
results: dict containing named submatches.
"""
if not patterns:
yield 0, {}
else:
p, rest = patterns[0], patterns[1:]
for c0, r0 in p.generate_matches(nodes):
if not rest:
yield c0, r0
else:
for c1, r1 in generate_matches(rest, nodes[c0:]):
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
| mit | 7,885,396,676,802,129,000 | 31.738444 | 88 | 0.555322 | false |
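A minimal usage sketch for the wildcard machinery above, assuming the standard-library lib2to3 package (where these pattern classes also live); the leaf values and the "names" capture key are illustrative only.
from lib2to3 import pytree
from lib2to3.pgen2 import token
# Three NAME leaves to match against (illustrative values).
leaves = [pytree.Leaf(token.NAME, value) for value in ("a", "b", "c")]
# Match one or more NAME leaves and capture the matched prefix under "names".
any_name = pytree.LeafPattern(token.NAME)
wildcard = pytree.WildcardPattern(content=[[any_name]], min=1, name="names")
for count, results in wildcard.generate_matches(leaves):
    # Yields prefixes of length 1, 2 and 3.
    print(count, [leaf.value for leaf in results["names"]])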
ryfeus/lambda-packs | Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/opt/python/training/nadam_optimizer.py | 57 | 4017 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import adam
from tensorflow.python.training import training_ops
class NadamOptimizer(adam.AdamOptimizer):
"""Optimizer that implements the Nadam algorithm.
See [Dozat, T., 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
"""
def _apply_dense(self, grad, var):
m = self.get_slot(var, "m")
v = self.get_slot(var, "v")
return training_ops.apply_adam(
var,
m,
v,
math_ops.cast(self._beta1_power, var.dtype.base_dtype),
math_ops.cast(self._beta2_power, var.dtype.base_dtype),
math_ops.cast(self._lr_t, var.dtype.base_dtype),
math_ops.cast(self._beta1_t, var.dtype.base_dtype),
math_ops.cast(self._beta2_t, var.dtype.base_dtype),
math_ops.cast(self._epsilon_t, var.dtype.base_dtype),
grad,
use_locking=self._use_locking,
use_nesterov=True).op
def _resource_apply_dense(self, grad, var):
m = self.get_slot(var, "m")
v = self.get_slot(var, "v")
return training_ops.resource_apply_adam(
var.handle,
m.handle,
v.handle,
math_ops.cast(self._beta1_power, grad.dtype.base_dtype),
math_ops.cast(self._beta2_power, grad.dtype.base_dtype),
math_ops.cast(self._lr_t, grad.dtype.base_dtype),
math_ops.cast(self._beta1_t, grad.dtype.base_dtype),
math_ops.cast(self._beta2_t, grad.dtype.base_dtype),
math_ops.cast(self._epsilon_t, grad.dtype.base_dtype),
grad,
use_locking=self._use_locking,
use_nesterov=True)
def _apply_sparse_shared(self, grad, var, indices, scatter_add):
beta1_power = math_ops.cast(self._beta1_power, var.dtype.base_dtype)
beta2_power = math_ops.cast(self._beta2_power, var.dtype.base_dtype)
lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
beta1_t = math_ops.cast(self._beta1_t, var.dtype.base_dtype)
beta2_t = math_ops.cast(self._beta2_t, var.dtype.base_dtype)
epsilon_t = math_ops.cast(self._epsilon_t, var.dtype.base_dtype)
lr = (lr_t * math_ops.sqrt(1 - beta2_power) / (1 - beta1_power))
# m_t = beta1 * m + (1 - beta1) * g_t
m = self.get_slot(var, "m")
m_scaled_g_values = grad * (1 - beta1_t)
m_t = state_ops.assign(m, m * beta1_t, use_locking=self._use_locking)
with ops.control_dependencies([m_t]):
m_t = scatter_add(m, indices, m_scaled_g_values)
# m_bar = (1 - beta1) * g_t + beta1 * m_t
m_bar = m_scaled_g_values + beta1_t * m_t
# v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
v = self.get_slot(var, "v")
v_scaled_g_values = (grad * grad) * (1 - beta2_t)
v_t = state_ops.assign(v, v * beta2_t, use_locking=self._use_locking)
with ops.control_dependencies([v_t]):
v_t = scatter_add(v, indices, v_scaled_g_values)
v_sqrt = math_ops.sqrt(v_t)
var_update = state_ops.assign_sub(
var, lr * m_bar / (v_sqrt + epsilon_t), use_locking=self._use_locking)
return control_flow_ops.group(*[var_update, m_bar, v_t])
| mit | 7,756,948,106,105,243,000 | 42.193548 | 80 | 0.643266 | false |
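A short sketch (not from the repository) of driving NadamOptimizer in a TF1-style graph; the toy variable, learning rate, and step count are assumptions for illustration.
import tensorflow as tf
# Minimise (x - 2)^2 with Nadam; x should move towards 2.0.
x = tf.Variable(3.0)
loss = tf.square(x - 2.0)
train_op = tf.contrib.opt.NadamOptimizer(learning_rate=0.05).minimize(loss)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(200):
        sess.run(train_op)
    print(sess.run(x))  # approximately 2.0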
partofthething/home-assistant | homeassistant/components/hlk_sw16/switch.py | 16 | 1200 | """Support for HLK-SW16 switches."""
from homeassistant.components.switch import ToggleEntity
from . import DATA_DEVICE_REGISTER, SW16Device
from .const import DOMAIN
PARALLEL_UPDATES = 0
def devices_from_entities(hass, entry):
"""Parse configuration and add HLK-SW16 switch devices."""
device_client = hass.data[DOMAIN][entry.entry_id][DATA_DEVICE_REGISTER]
devices = []
for i in range(16):
device_port = f"{i:01x}"
device = SW16Switch(device_port, entry.entry_id, device_client)
devices.append(device)
return devices
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the HLK-SW16 platform."""
async_add_entities(devices_from_entities(hass, entry))
class SW16Switch(SW16Device, ToggleEntity):
"""Representation of a HLK-SW16 switch."""
@property
def is_on(self):
"""Return true if device is on."""
return self._is_on
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._client.turn_on(self._device_port)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._client.turn_off(self._device_port)
| mit | 2,793,657,185,546,965,500 | 29 | 75 | 0.664167 | false |
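For reference, the relay port identifiers produced by devices_from_entities above are single hex digits; a standalone illustration (assumed, outside Home Assistant):
ports = [f"{i:01x}" for i in range(16)]
print(ports)  # ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']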
srom/chessbot | estimator/train/parse_logs/__main__.py | 1 | 1947 | from __future__ import unicode_literals
import argparse
import logging
import re
logger = logging.getLogger(__name__)
def main(log_path):
for log_line in yield_train_log_line(log_path):
print log_line
break
class TrainLogLine(object):
__slots__ = ('iteration', 'elapsed', 'test_loss', 'train_loss', 'best', 'best_iteration')
def __init__(self, **kwargs):
for key, value in kwargs.iteritems():
setattr(self, key, value)
def __unicode__(self):
return (
'Training batch {iteration}; ' +
'Elapsed {elapsed}; ' +
'loss: {test_loss} (train: {train_loss}); ' +
'best: {best} ({best_iteration})'
).format(**self.to_dict())
def __repr__(self):
return self.__unicode__()
def to_dict(self):
return {
key: getattr(self, key)
for key in self.__slots__
if hasattr(self, key)
}
def yield_train_log_line(log_path):
with open(log_path, 'r') as f:
for line in f:
if is_train_log_line(line):
yield parse_line(line)
def parse_line(line):
r = r'^.*Training batch ([0-9]+); Elapsed ([0-9]+)s; loss: ([0-9\.]+) \(train: ([0-9\.]+)\); best: ([0-9\.]+) \(([0-9]+)\).*$'
m = re.match(r, line)
if m is None:
raise ValueError('No match for line {}'.format(line))
return TrainLogLine(
iteration=m.group(1),
elapsed=m.group(2),
test_loss=m.group(3),
train_loss=m.group(4),
best=m.group(5),
best_iteration=m.group(6),
)
def is_train_log_line(line):
return re.search('Training batch', line) is not None
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format="%(asctime)s (%(levelname)s) %(message)s")
parser = argparse.ArgumentParser()
parser.add_argument('log_path')
args = parser.parse_args()
main(args.log_path)
| mit | 6,165,180,149,807,101,000 | 24.618421 | 130 | 0.54905 | false |
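A quick sketch of what parse_line extracts, using a made-up log line in the format the regex above expects:
sample = ("2018-03-01 10:15:00 (INFO) Training batch 1200; Elapsed 345s; "
          "loss: 0.731 (train: 0.702); best: 0.729 (1100)")
entry = parse_line(sample)
print(entry)  # Training batch 1200; Elapsed 345; loss: 0.731 (train: 0.702); best: 0.729 (1100)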