repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
shyamalschandra/picochess | server.py | 2 | 9171 | #!/usr/bin/env python
# Copyright (C) 2013-2014 Jean-Francois Romang ([email protected])
# Shivkumar Shivaji ()
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import threading
from flask import Flask
import tornado.web
import tornado.wsgi
from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop
from multiprocessing.pool import ThreadPool
from utilities import *
import queue
from web.picoweb import picoweb as pw
import chess.pgn as pgn
import json
import datetime
_workers = ThreadPool(5)
class ChannelHandler(tornado.web.RequestHandler):
    def initialize(self, shared=None):
        self.shared = shared

    def post(self):
        action = self.get_argument("action")
        # print("action: {0}".format(action))
        # $.post("/channel", { action: "broadcast", fen: currentPosition.fen, pgn: pgnEl[0].innerText}, function (data) {
        if action == 'broadcast':
            fen = self.get_argument("fen")
            # print("fen: {0}".format(fen))
            move_stack = self.get_argument("moveStack")
            move_stack = json.loads(move_stack)
            game = pgn.Game()
            self.create_game_header(game)
            tmp = game
            # move_stack = message.game.move_stack
            for move in move_stack:
                tmp = tmp.add_variation(tmp.board().parse_san(move))
            # print (message.game.move_stack)
            exporter = pgn.StringExporter()
            game.export(exporter, headers=True, comments=False, variations=False)
            # print ("PGN: ")
            # print (str(exporter))
            # r = {'move': str(message.move), , 'fen': message.game.fen()}
            # print("pgn: {0}".format(pgn))
            r = {'type': 'broadcast', 'msg': 'Received position from Spectators!', 'pgn': str(exporter), 'fen': fen}
            EventHandler.write_to_clients(r)
        # if action == 'pause_cloud_engine':


class EventHandler(WebSocketHandler):
    clients = set()

    def initialize(self, shared=None):
        self.shared = shared

    def open(self):
        EventHandler.clients.add(self)

    def on_close(self):
        EventHandler.clients.remove(self)

    @classmethod
    def write_to_clients(cls, msg):
        # print "Writing to clients"
        for client in cls.clients:
            client.write_message(msg)


class DGTHandler(tornado.web.RequestHandler):
    def initialize(self, shared=None):
        self.shared = shared

    def get(self, *args, **kwargs):
        action = self.get_argument("action")
        if action == "get_last_move":
            self.write(self.shared['last_dgt_move_msg'])


class InfoHandler(tornado.web.RequestHandler):
    def initialize(self, shared=None):
        self.shared = shared

    def get(self, *args, **kwargs):
        action = self.get_argument("action")
        if action == "get_system_info":
            # print(self.shared['system_info'])
            self.write(self.shared['system_info'])


class PGNHandler(tornado.web.RequestHandler):
    def initialize(self, shared=None):
        self.shared = shared

    def get(self, *args, **kwargs):
        action = self.get_argument("action")
        # print (action)
        if action == "get_pgn_file":
            self.set_header('Content-Type', 'text/pgn')
            self.set_header('Content-Disposition', 'attachment; filename=game.pgn')
            self.write(self.shared['last_dgt_move_msg']['pgn'])


class WebServer(Observable, threading.Thread):
    def __init__(self):
        shared = {}
        WebDisplay(shared).start()
        super(WebServer, self).__init__()
        wsgi_app = tornado.wsgi.WSGIContainer(pw)
        application = tornado.web.Application([
            (r'/event', EventHandler, dict(shared=shared)),
            (r'/dgt', DGTHandler, dict(shared=shared)),
            (r'/pgn', PGNHandler, dict(shared=shared)),
            (r'/info', InfoHandler, dict(shared=shared)),
            (r'/channel', ChannelHandler, dict(shared=shared)),
            (r'.*', tornado.web.FallbackHandler, {'fallback': wsgi_app})
        ])
        application.listen(80)

    def run(self):
        IOLoop.instance().start()


class WebDisplay(Display, threading.Thread):
    def __init__(self, shared):
        super(WebDisplay, self).__init__()
        self.shared = shared

    @staticmethod
    def run_background(func, callback, args=(), kwds=None):
        if not kwds:
            kwds = {}

        def _callback(result):
            IOLoop.instance().add_callback(lambda: callback(result))

        _workers.apply_async(func, args, kwds, _callback)

    def create_game_header(self, game):
        game.headers["Result"] = "*"
        game.headers["White"] = "User"
        game.headers["WhiteElo"] = "*"
        game.headers["BlackElo"] = "2900"
        game.headers["Black"] = "Picochess"
        game.headers["Event"] = "Game"
        game.headers["EventDate"] = datetime.datetime.now().date().strftime('%Y-%m-%d')
        game.headers["Site"] = "Pi"
        if 'system_info' in self.shared:
            game.headers["Site"] = self.shared['system_info']['location']
        if 'game_info' in self.shared:
            # game.headers["Result"] = "*"
            game.headers["Black"] = "Picochess" if "mode_string" in self.shared["game_info"] and self.shared["game_info"]["mode_string"] == Mode.PLAY_BLACK else "User"
            game.headers["White"] = "Picochess" if game.headers["Black"] == "User" else "User"
            comp_color = "Black" if game.headers["Black"] == "Picochess" else "White"
            if "level" in self.shared["game_info"]:
                game.headers[comp_color + "Elo"] = "Level {0}".format(self.shared["game_info"]["level"])
            else:
                game.headers[comp_color + "Elo"] = "2900"
            if "time_control_string" in self.shared["game_info"]:
                game.headers["Event"] = "Time " + self.shared["game_info"]["time_control_string"]

    # @staticmethod
    def create_game_info(self):
        if 'game_info' not in self.shared:
            self.shared['game_info'] = {}

    def task(self, message):
        if message == Message.BOOK_MOVE:
            EventHandler.write_to_clients({'msg': 'Book move'})
        elif message == Message.UCI_OPTION_LIST:
            self.shared['uci_options'] = message.options
        elif message == Message.SYSTEM_INFO:
            self.shared['system_info'] = message.info
        elif message == Event.OPENING_BOOK:  # Process opening book
            self.create_game_info()
            self.shared['game_info']['book'] = message.book
        elif message == Event.SET_MODE:  # Process interaction mode
            self.create_game_info()
            self.shared['game_info']['mode_string'] = message.mode_string
        elif message == Event.SET_TIME_CONTROL:
            self.create_game_info()
            self.shared['game_info']['time_control_string'] = message.time_control_string
        elif message == Event.LEVEL:
            self.create_game_info()
            self.shared['game_info']['level'] = message.level
        elif message == Message.START_NEW_GAME:
            EventHandler.write_to_clients({'msg': 'New game'})
        elif message == Message.SEARCH_STARTED:
            EventHandler.write_to_clients({'msg': 'Thinking..'})
        elif message == Message.COMPUTER_MOVE or message == Message.USER_MOVE or message == Message.REVIEW_MODE_MOVE:
            game = pgn.Game()
            self.create_game_header(game)
            tmp = game
            move_stack = message.game.move_stack
            for move in move_stack:
                tmp = tmp.add_variation(move)
            exporter = pgn.StringExporter()
            game.export(exporter, headers=True, comments=False, variations=False)
            fen = message.game.fen()
            pgn_str = str(exporter)
            r = {'move': str(message.move), 'pgn': pgn_str, 'fen': fen}
            if message == Message.COMPUTER_MOVE:
                r['msg'] = 'Computer move: ' + str(message.move)
            elif message == Message.USER_MOVE:
                r['msg'] = 'User move: ' + str(message.move)
            self.shared['last_dgt_move_msg'] = r
            EventHandler.write_to_clients(r)

    def create_task(self, msg):
        IOLoop.instance().add_callback(callback=lambda: self.task(msg))

    def run(self):
        while True:
            # Check if we have something to display
            message = self.message_queue.get()
            # print(message.options)
            self.create_task(message)
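
# Illustrative sketch (not part of the original picochess file): it isolates
# the PGN-building pattern used by ChannelHandler.post() above -- replay a
# list of SAN moves into a python-chess game tree, then export it. The default
# move list is an arbitrary example.
def _demo_pgn_from_san_moves(san_moves=('e4', 'e5', 'Nf3')):
    game = pgn.Game()
    node = game
    for san in san_moves:
        # parse_san() turns SAN text into a Move legal on the node's board
        node = node.add_variation(node.board().parse_san(san))
    exporter = pgn.StringExporter()
    game.export(exporter, headers=True, comments=False, variations=False)
    return str(exporter)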
| gpl-3.0 | 7,047,831,063,330,097,000 | 34.273077 | 167 | 0.59819 | false |
sunjincheng121/flink | flink-python/pyflink/table/tests/test_environment_settings.py | 13 | 5290 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.java_gateway import get_gateway
from pyflink.table import EnvironmentSettings
from pyflink.testing.test_case_utils import PyFlinkTestCase, get_private_field
class EnvironmentSettingsTests(PyFlinkTestCase):

    def test_planner_selection(self):
        gateway = get_gateway()
        CLASS_NAME = gateway.jvm.EnvironmentSettings.CLASS_NAME

        builder = EnvironmentSettings.new_instance()

        OLD_PLANNER_FACTORY = get_private_field(builder._j_builder, "OLD_PLANNER_FACTORY")
        OLD_EXECUTOR_FACTORY = get_private_field(builder._j_builder, "OLD_EXECUTOR_FACTORY")
        BLINK_PLANNER_FACTORY = get_private_field(builder._j_builder, "BLINK_PLANNER_FACTORY")
        BLINK_EXECUTOR_FACTORY = get_private_field(builder._j_builder, "BLINK_EXECUTOR_FACTORY")

        # test the default behaviour to make sure it is consistent with the python doc
        environment_settings = builder.build()

        self.assertEqual(
            environment_settings._j_environment_settings.toPlannerProperties()[CLASS_NAME],
            BLINK_PLANNER_FACTORY)
        self.assertEqual(
            environment_settings._j_environment_settings.toExecutorProperties()[CLASS_NAME],
            BLINK_EXECUTOR_FACTORY)

        # test use_old_planner
        environment_settings = builder.use_old_planner().build()

        self.assertEqual(
            environment_settings._j_environment_settings.toPlannerProperties()[CLASS_NAME],
            OLD_PLANNER_FACTORY)
        self.assertEqual(
            environment_settings._j_environment_settings.toExecutorProperties()[CLASS_NAME],
            OLD_EXECUTOR_FACTORY)

        # test use_blink_planner
        environment_settings = builder.use_blink_planner().build()

        self.assertEqual(
            environment_settings._j_environment_settings.toPlannerProperties()[CLASS_NAME],
            BLINK_PLANNER_FACTORY)
        self.assertEqual(
            environment_settings._j_environment_settings.toExecutorProperties()[CLASS_NAME],
            BLINK_EXECUTOR_FACTORY)

        # test use_any_planner
        environment_settings = builder.use_any_planner().build()

        self.assertTrue(
            CLASS_NAME not in environment_settings._j_environment_settings.toPlannerProperties())
        self.assertTrue(
            CLASS_NAME not in environment_settings._j_environment_settings.toExecutorProperties())

    def test_mode_selection(self):
        builder = EnvironmentSettings.new_instance()

        # test the default behaviour to make sure it is consistent with the python doc
        environment_settings = builder.build()

        self.assertTrue(environment_settings.is_streaming_mode())

        # test in_streaming_mode
        environment_settings = builder.in_streaming_mode().build()

        self.assertTrue(environment_settings.is_streaming_mode())

        # test in_batch_mode
        environment_settings = builder.in_batch_mode().build()

        self.assertFalse(environment_settings.is_streaming_mode())

    def test_with_built_in_catalog_name(self):
        gateway = get_gateway()
        DEFAULT_BUILTIN_CATALOG = gateway.jvm.EnvironmentSettings.DEFAULT_BUILTIN_CATALOG

        builder = EnvironmentSettings.new_instance()

        # test the default behaviour to make sure it is consistent with the python doc
        environment_settings = builder.build()

        self.assertEqual(environment_settings.get_built_in_catalog_name(), DEFAULT_BUILTIN_CATALOG)

        environment_settings = builder.with_built_in_catalog_name("my_catalog").build()

        self.assertEqual(environment_settings.get_built_in_catalog_name(), "my_catalog")

    def test_with_built_in_database_name(self):
        gateway = get_gateway()
        DEFAULT_BUILTIN_DATABASE = gateway.jvm.EnvironmentSettings.DEFAULT_BUILTIN_DATABASE

        builder = EnvironmentSettings.new_instance()

        # test the default behaviour to make sure it is consistent with the python doc
        environment_settings = builder.build()

        self.assertEqual(environment_settings.get_built_in_database_name(),
                         DEFAULT_BUILTIN_DATABASE)

        environment_settings = builder.with_built_in_database_name("my_database").build()

        self.assertEqual(environment_settings.get_built_in_database_name(), "my_database")
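
# A minimal usage sketch of the builder API exercised above (not part of the
# original test file); assumes a working pyflink installation with a JVM
# gateway available, and catalog/database names chosen for illustration.
def _demo_build_settings():
    settings = EnvironmentSettings.new_instance() \
        .use_blink_planner() \
        .in_streaming_mode() \
        .with_built_in_catalog_name("my_catalog") \
        .with_built_in_database_name("my_database") \
        .build()
    return settings.is_streaming_mode()  # True for this configuration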
| apache-2.0 | 1,773,832,937,216,332,800 | 38.774436 | 99 | 0.679395 | false |
Nick-OpusVL/odoo | addons/sale/wizard/__init__.py | 444 | 1129 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_make_invoice
import sale_line_invoice
import sale_make_invoice_advance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 1,724,158,854,674,093,800 | 42.423077 | 78 | 0.627989 | false |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/webdriver/pylib/test/selenium/webdriver/common/rendered_webelement_tests.py | 28 | 2265 | # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class RenderedWebElementTests(unittest.TestCase):

    @pytest.mark.ignore_chrome
    def testShouldPickUpStyleOfAnElement(self):
        self._loadPage("javascriptPage")

        element = self.driver.find_element(by=By.ID, value="green-parent")
        backgroundColour = element.value_of_css_property("background-color")
        self.assertEqual("rgba(0, 128, 0, 1)", backgroundColour)

        element = self.driver.find_element(by=By.ID, value="red-item")
        backgroundColour = element.value_of_css_property("background-color")
        self.assertEqual("rgba(255, 0, 0, 1)", backgroundColour)

    @pytest.mark.ignore_chrome
    def testShouldAllowInheritedStylesToBeUsed(self):
        self._loadPage("javascriptPage")
        element = self.driver.find_element(by=By.ID, value="green-item")
        backgroundColour = element.value_of_css_property("background-color")
        self.assertEqual("transparent", backgroundColour)

    def testShouldCorrectlyIdentifyThatAnElementHasWidth(self):
        self._loadPage("xhtmlTest")
        shrinko = self.driver.find_element(by=By.ID, value="linkId")
        size = shrinko.size
        self.assertTrue(size["width"] > 0, "Width expected to be greater than 0")
        self.assertTrue(size["height"] > 0, "Height expected to be greater than 0")

    def _pageURL(self, name):
        return "http://localhost:%d/%s.html" % (self.webserver.port, name)

    def _loadSimplePage(self):
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        self.driver.get(self._pageURL(name))
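
    # A standalone sketch of the pattern the tests above rely on (not part of
    # the original file); assumes a live WebDriver session on self.driver, as
    # provided by the test harness.
    def _demoReadBackgroundColour(self):
        element = self.driver.find_element(by=By.ID, value="green-parent")
        # value_of_css_property returns the computed style as a string,
        # e.g. "rgba(0, 128, 0, 1)"
        return element.value_of_css_property("background-color")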
| bsd-3-clause | -8,226,931,401,569,406,000 | 34.390625 | 83 | 0.70596 | false |
zevanzhao/TCCL-Code | Misc/xyz2mode.py | 1 | 1906 | #!/usr/bin/env python
"""
This script is similiar to modemake.pl.
The script will generate DIMER_VECTOR in CP2K.
"""
from XYZFile import *
import sys, copy
def GetDiffXYZ(XYZ1, XYZ2):
"""
Get the difference of two XYZ file.
A DIMER VECTOR or MODECAR is made from this difference.
"""
DiffXYZ = XYZ()
if ( XYZ1.NumofAtoms == XYZ2.NumofAtoms ):
DiffXYZ.NumofAtoms = XYZ1.NumofAtoms
else:
print "Error: Different structures: %d atoms in XYZ1 and %d in XYZ2." % (XYZ1.NumofAtoms, XYZ2.NumofAtoms)
DiffXYZ = XYZ()
return DiffXYZ
for i in range(0, DiffXYZ.NumofAtoms):
tmpAtom = Atom()
if (XYZ1.Atoms[i].Symbol == XYZ2.Atoms[i].Symbol):
tmpAtom.Symbol = XYZ1.Atoms[i].Symbol
tmpAtom.Coord = XYZ2.Atoms[i].Coord - XYZ1.Atoms[i].Coord
DiffXYZ.Atoms.append(tmpAtom)
else:
print "Error: Different Atom N.O. %d: %s in XYZ1 and %s in XYZ2 " % (i+1, XYZ1.Atoms[i].Symbol, XYZ2.Atoms[i].Symbol)
DiffXYZ = XYZ()
return DiffXYZ
return DiffXYZ
#Main function
if (len(sys.argv) != 3 ):
print "Usage: %s [initial.xyz] [final.xyz]" % (sys.argv[0])
exit(0)
initial = XYZ()
initial.ReadXYZ(sys.argv[1])
#debug lines
#print "Initial structure:"
#initial.PrintXYZ()
final = XYZ()
final.ReadXYZ(sys.argv[2])
#debug lines
#print "Final structure:"
#final.PrintXYZ()
DiffXYZ = GetDiffXYZ(initial, final)
#debug lines
#DiffXYZ.PrintXYZ()
#Generate the Dimer Vector from the two XYZ files.
Mode = copy.deepcopy(DiffXYZ)
sumvec = 0
for i in range(0, Mode.NumofAtoms):
for j in range(0, 3):
sumvec += Mode.Atoms[i].Coord[j]*Mode.Atoms[i].Coord[j]
sumvec = math.sqrt(sumvec)
for i in range(0, Mode.NumofAtoms):
for j in range(0, 3):
Mode.Atoms[i].Coord[j] /= sumvec
# debug line
# Mode.PrintXYZ()
Mode.WriteMode("DIMER_VECTOR")
#
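
# An illustrative helper (not used above; not part of the original script):
# the same Euclidean normalization the loop performs, over a plain list of
# (x, y, z) tuples.
def DemoNormalize(coords):
    total = math.sqrt(sum(x*x + y*y + z*z for x, y, z in coords))
    return [(x/total, y/total, z/total) for x, y, z in coords]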
| gpl-3.0 | 35,441,942,985,437,564 | 28.78125 | 129 | 0.637461 | false |
dtroyer/python-openstacksdk | openstack/tests/unit/cloud/test_qos_policy.py | 1 | 13244 | # Copyright 2017 OVH SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from openstack.cloud import exc
from openstack.tests.unit import base
class TestQosPolicy(base.TestCase):

    policy_name = 'qos test policy'
    policy_id = '881d1bb7-a663-44c0-8f9f-ee2765b74486'
    project_id = 'c88fc89f-5121-4a4c-87fd-496b5af864e9'

    mock_policy = {
        'id': policy_id,
        'name': policy_name,
        'description': '',
        'rules': [],
        'project_id': project_id,
        'tenant_id': project_id,
        'shared': False,
        'is_default': False
    }

    qos_extension = {
        "updated": "2015-06-08T10:00:00-00:00",
        "name": "Quality of Service",
        "links": [],
        "alias": "qos",
        "description": "The Quality of Service extension."
    }

    qos_default_extension = {
        "updated": "2017-041-06T10:00:00-00:00",
        "name": "QoS default policy",
        "links": [],
        "alias": "qos-default",
        "description": "Expose the QoS default policy per project"
    }

    enabled_neutron_extensions = [qos_extension, qos_default_extension]

    def test_get_qos_policy(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [self.mock_policy]})
        ])
        r = self.cloud.get_qos_policy(self.policy_name)
        self.assertIsNotNone(r)
        self.assertDictEqual(self.mock_policy, r)
        self.assert_calls()

    def test_get_qos_policy_no_qos_extension(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': []})
        ])
        self.assertRaises(
            exc.OpenStackCloudException,
            self.cloud.get_qos_policy, self.policy_name)
        self.assert_calls()

    def test_create_qos_policy(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='POST',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policy': self.mock_policy})
        ])
        policy = self.cloud.create_qos_policy(
            name=self.policy_name, project_id=self.project_id)
        self.assertDictEqual(self.mock_policy, policy)
        self.assert_calls()

    def test_create_qos_policy_no_qos_extension(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': []})
        ])
        self.assertRaises(
            exc.OpenStackCloudException,
            self.cloud.create_qos_policy, name=self.policy_name)
        self.assert_calls()

    def test_create_qos_policy_no_qos_default_extension(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': [self.qos_extension]}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': [self.qos_extension]}),
            dict(method='POST',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policy': self.mock_policy},
                 validate=dict(
                     json={'policy': {
                         'name': self.policy_name,
                         'project_id': self.project_id}}))
        ])
        policy = self.cloud.create_qos_policy(
            name=self.policy_name, project_id=self.project_id, default=True)
        self.assertDictEqual(self.mock_policy, policy)
        self.assert_calls()

    def test_delete_qos_policy(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [self.mock_policy]}),
            dict(method='DELETE',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies',
                             '%s.json' % self.policy_id]),
                 json={})
        ])
        self.assertTrue(self.cloud.delete_qos_policy(self.policy_name))
        self.assert_calls()

    def test_delete_qos_policy_no_qos_extension(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': []})
        ])
        self.assertRaises(
            exc.OpenStackCloudException,
            self.cloud.delete_qos_policy, self.policy_name)
        self.assert_calls()

    def test_delete_qos_policy_not_found(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': []})
        ])
        self.assertFalse(self.cloud.delete_qos_policy('goofy'))
        self.assert_calls()

    def test_delete_qos_policy_multiple_found(self):
        policy1 = dict(id='123', name=self.policy_name)
        policy2 = dict(id='456', name=self.policy_name)
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [policy1, policy2]})
        ])
        self.assertRaises(exc.OpenStackCloudException,
                          self.cloud.delete_qos_policy,
                          self.policy_name)
        self.assert_calls()

    def test_delete_qos_policy_multiple_using_id(self):
        policy1 = self.mock_policy
        policy2 = dict(id='456', name=self.policy_name)
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [policy1, policy2]}),
            dict(method='DELETE',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies',
                             '%s.json' % self.policy_id]),
                 json={})
        ])
        self.assertTrue(self.cloud.delete_qos_policy(policy1['id']))
        self.assert_calls()

    def test_update_qos_policy(self):
        expected_policy = copy.copy(self.mock_policy)
        expected_policy['name'] = 'goofy'
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': self.enabled_neutron_extensions}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [self.mock_policy]}),
            dict(method='PUT',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies',
                             '%s.json' % self.policy_id]),
                 json={'policy': expected_policy},
                 validate=dict(
                     json={'policy': {'name': 'goofy'}}))
        ])
        policy = self.cloud.update_qos_policy(
            self.policy_id, name='goofy')
        self.assertDictEqual(expected_policy, policy)
        self.assert_calls()

    def test_update_qos_policy_no_qos_extension(self):
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': []})
        ])
        self.assertRaises(
            exc.OpenStackCloudException,
            self.cloud.update_qos_policy, self.policy_id, name="goofy")
        self.assert_calls()

    def test_update_qos_policy_no_qos_default_extension(self):
        expected_policy = copy.copy(self.mock_policy)
        expected_policy['name'] = 'goofy'
        self.register_uris([
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': [self.qos_extension]}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': [self.qos_extension]}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public', append=['v2.0', 'extensions.json']),
                 json={'extensions': [self.qos_extension]}),
            dict(method='GET',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies.json']),
                 json={'policies': [self.mock_policy]}),
            dict(method='PUT',
                 uri=self.get_mock_url(
                     'network', 'public',
                     append=['v2.0', 'qos', 'policies',
                             '%s.json' % self.policy_id]),
                 json={'policy': expected_policy},
                 validate=dict(
                     json={'policy': {'name': "goofy"}}))
        ])
        policy = self.cloud.update_qos_policy(
            self.policy_id, name='goofy', default=True)
        self.assertDictEqual(expected_policy, policy)
        self.assert_calls()
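
    # A usage sketch of the cloud API calls mocked above (not part of the
    # original test module, and not itself a test): assumes an authenticated
    # openstack.cloud connection (here self.cloud, as in the tests) and a
    # policy name chosen for illustration.
    def _demo_qos_roundtrip(self):
        policy = self.cloud.create_qos_policy(name='gold', default=True)
        self.cloud.update_qos_policy(policy['id'], name='gold-v2')
        return self.cloud.delete_qos_policy('gold-v2')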
| apache-2.0 | -8,820,054,015,115,783,000 | 40.130435 | 78 | 0.502567 | false |
KlaasDeNys/Arduino | arduino-core/src/processing/app/i18n/python/requests/sessions.py | 151 | 15091 | # -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from datetime import datetime
from .compat import cookielib
from .cookies import cookiejar_from_dict
from .models import Request, PreparedRequest
from .hooks import default_hooks, dispatch_hook
from .utils import from_key_val_list, default_headers
from .exceptions import TooManyRedirects, InvalidSchema
from .compat import urlparse, urljoin
from .adapters import HTTPAdapter
from .utils import requote_uri, get_environ_proxies, get_netrc_auth
from .status_codes import codes
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_moved,  # 307
)
DEFAULT_REDIRECT_LIMIT = 30
def merge_kwargs(local_kwarg, default_kwarg):
    """Merges kwarg dictionaries.

    If a local key in the dictionary is set to None, it will be removed.
    """

    if default_kwarg is None:
        return local_kwarg

    if isinstance(local_kwarg, str):
        return local_kwarg

    if local_kwarg is None:
        return default_kwarg

    # Bypass if not a dictionary (e.g. timeout)
    if not hasattr(default_kwarg, 'items'):
        return local_kwarg

    default_kwarg = from_key_val_list(default_kwarg)
    local_kwarg = from_key_val_list(local_kwarg)

    # Update new values in a case-insensitive way
    def get_original_key(original_keys, new_key):
        """
        Finds the key from original_keys that case-insensitive matches new_key.
        """
        for original_key in original_keys:
            if new_key.lower() == original_key.lower():
                return original_key
        return new_key

    kwargs = default_kwarg.copy()
    original_keys = kwargs.keys()
    for key, value in local_kwarg.items():
        kwargs[get_original_key(original_keys, key)] = value

    # Remove keys that are set to None.
    for (k, v) in local_kwarg.items():
        if v is None:
            del kwargs[k]

    return kwargs
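
def _demo_merge_kwargs():
    # A worked example of the semantics above (illustrative; not part of the
    # original module): per-request values override the session defaults, and
    # an explicit None removes the key entirely.
    return merge_kwargs({'timeout': None, 'User-Agent': 'x'}, {'timeout': 5})
    # -> {'User-Agent': 'x'}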
class SessionRedirectMixin(object):

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None):
        """Receives a Response. Returns a generator of Responses."""

        i = 0
        prepared_request = PreparedRequest()
        prepared_request.body = req.body
        prepared_request.headers = req.headers.copy()
        prepared_request.hooks = req.hooks
        prepared_request.method = req.method
        prepared_request.url = req.url

        cookiejar = resp.cookies

        # ((resp.status_code is codes.see_other))
        while (('location' in resp.headers and resp.status_code in REDIRECT_STATI)):

            resp.content  # Consume socket so it can be released

            if i >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)

            # Release the connection back into the pool.
            resp.close()

            url = resp.headers['location']
            method = prepared_request.method

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # Facilitate non-RFC2616-compliant 'location' headers
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            if not urlparse(url).netloc:
                # Compliant with RFC3986, we percent encode the url.
                url = urljoin(resp.url, requote_uri(url))

            prepared_request.url = url

            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
            if (resp.status_code == codes.see_other and
                    prepared_request.method != 'HEAD'):
                method = 'GET'

            # Do what the browsers do, despite standards...
            if (resp.status_code in (codes.moved, codes.found) and
                    prepared_request.method == 'POST'):
                method = 'GET'

            prepared_request.method = method

            if resp.status_code is not codes.temporary:
                if 'Content-Length' in prepared_request.headers:
                    del prepared_request.headers['Content-Length']
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            prepared_request.prepare_cookies(cookiejar)

            resp = self.send(
                prepared_request,
                stream=stream,
                timeout=timeout,
                verify=verify,
                cert=cert,
                proxies=proxies,
                allow_redirects=False,
            )

            cookiejar.update(resp.cookies)

            i += 1
            yield resp

        resp.cookies.update(cookiejar)
class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      200
    """

    __attrs__ = [
        'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
        'params', 'verify', 'cert', 'prefetch']

    def __init__(self):

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol to the URL of the proxy (e.g.
        #: {'http': 'foo.bar:3128'}) to be used on each
        #: :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL certificate default.
        self.cert = None

        #: Maximum number of redirects to follow.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Should we trust the environment?
        self.trust_env = True

        # Set up a CookieJar to be used by default
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = {}
        self.mount('http://', HTTPAdapter())
        self.mount('https://', HTTPAdapter())

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def request(self, method, url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=True,
        proxies=None,
        hooks=None,
        stream=None,
        verify=None,
        cert=None):

        cookies = cookies or {}
        proxies = proxies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = self.cookies.copy()
        merged_cookies.update(cookies)
        cookies = merged_cookies

        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            env_proxies = get_environ_proxies(url) or {}
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Set environment's basic authentication.
            if not auth:
                auth = get_netrc_auth(url)

            # Look for configuration.
            if not verify and verify is not False:
                verify = os.environ.get('REQUESTS_CA_BUNDLE')

            # Curl compatibility.
            if not verify and verify is not False:
                verify = os.environ.get('CURL_CA_BUNDLE')

        # Merge all the kwargs.
        params = merge_kwargs(params, self.params)
        headers = merge_kwargs(headers, self.headers)
        auth = merge_kwargs(auth, self.auth)
        proxies = merge_kwargs(proxies, self.proxies)
        hooks = merge_kwargs(hooks, self.hooks)
        stream = merge_kwargs(stream, self.stream)
        verify = merge_kwargs(verify, self.verify)
        cert = merge_kwargs(cert, self.cert)

        # Create the Request.
        req = Request()
        req.method = method.upper()
        req.url = url
        req.headers = headers
        req.files = files
        req.data = data
        req.params = params
        req.auth = auth
        req.cookies = cookies
        req.hooks = hooks

        # Prepare the Request.
        prep = req.prepare()

        # Send the request.
        send_kwargs = {
            'stream': stream,
            'timeout': timeout,
            'verify': verify,
            'cert': cert,
            'proxies': proxies,
            'allow_redirects': allow_redirects,
            'req': req,
        }
        resp = self.send(prep, **send_kwargs)

        # Persist cookies.
        self.cookies.update(resp.cookies)

        return resp

    def get(self, url, **kwargs):
        """Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        kwargs.setdefault('allow_redirects', True)
        return self.request('GET', url, **kwargs)

    def options(self, url, **kwargs):
        """Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        kwargs.setdefault('allow_redirects', True)
        return self.request('OPTIONS', url, **kwargs)

    def head(self, url, **kwargs):
        """Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        kwargs.setdefault('allow_redirects', False)
        return self.request('HEAD', url, **kwargs)

    def post(self, url, data=None, **kwargs):
        """Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        return self.request('POST', url, data=data, **kwargs)

    def put(self, url, data=None, **kwargs):
        """Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        return self.request('PUT', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        """Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        return self.request('PATCH', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        """Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """
        return self.request('DELETE', url, **kwargs)

    def send(self, request, **kwargs):
        """Send a given PreparedRequest."""
        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if getattr(request, 'prepare', None):
            raise ValueError('You can only send PreparedRequests.')

        # Set up variables needed for resolve_redirects and dispatching of
        # hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        req = kwargs.pop('req', None)
        stream = kwargs.get('stream', False)
        timeout = kwargs.get('timeout')
        verify = kwargs.get('verify')
        cert = kwargs.get('cert')
        proxies = kwargs.get('proxies')
        hooks = request.hooks

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = datetime.utcnow()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        r.elapsed = datetime.utcnow() - start

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, stream=stream,
                                     timeout=timeout, verify=verify, cert=cert,
                                     proxies=proxies)

        # Resolve redirects if allowed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = tuple(history)

        return r

    def get_adapter(self, url):
        """Returns the appropriate connection adapter for the given URL."""
        for (prefix, adapter) in self.adapters.items():
            if url.startswith(prefix):
                return adapter

        # Nothing matches :-/
        raise InvalidSchema("No connection adapters were found for '%s'" % url)

    def close(self):
        """Closes all adapters and as such the session"""
        for _, v in self.adapters.items():
            v.close()

    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix."""
        self.adapters[prefix] = adapter

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)

    def __setstate__(self, state):
        for attr, value in state.items():
            setattr(self, attr, value)
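
def _demo_session_defaults():
    # A small sketch of Session reuse (illustrative; the header name below is
    # an arbitrary example, not part of the original module): headers set here
    # persist across every request the session makes, and adapters are looked
    # up by URL prefix via mount().
    s = Session()
    s.headers['X-Demo'] = 'true'
    s.mount('https://', HTTPAdapter())
    return s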
def session():
    """Returns a :class:`Session` for context-management."""

    return Session()
| lgpl-2.1 | 7,249,836,560,818,372,000 | 30.972458 | 115 | 0.585581 | false |
DirtyUnicorns/android_external_chromium-org | ppapi/generators/idl_generator.py | 165 | 8397 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from idl_log import ErrOut, InfoOut, WarnOut
from idl_option import GetOption, Option, ParseOptions
from idl_parser import ParseFiles
GeneratorList = []
Option('out', 'List of output files', default='')
Option('release', 'Which release to generate.', default='')
Option('range', 'Which ranges in the form of MIN,MAX.', default='start,end')
class Generator(object):
  """Base class for generators.

  This class provides a mechanism for adding new generator objects to the IDL
  driver.  To use this class override the GenerateRelease and GenerateRange
  members, and instantiate one copy of the class in the same module which
  defines it to register the generator.  After the AST is generated, call the
  static Run member which will check every registered generator to see which
  ones have been enabled through command-line options.  To enable a generator
  use the switches:
    --<sname> : To enable with defaults
    --<sname>_opt=<XXX,YYY=y> : To enable with generator specific options.

  NOTE:  Generators still have access to global options
  """

  def __init__(self, name, sname, desc):
    self.name = name
    self.run_switch = Option(sname, desc)
    self.opt_switch = Option(sname + '_opt', 'Options for %s.' % sname,
                             default='')
    GeneratorList.append(self)
    self.errors = 0
    self.skip_list = []

  def Error(self, msg):
    ErrOut.Log('Error %s : %s' % (self.name, msg))
    self.errors += 1

  def GetRunOptions(self):
    options = {}

    option_list = self.opt_switch.Get()
    if option_list:
      option_list = option_list.split(',')
      for opt in option_list:
        offs = opt.find('=')
        if offs > 0:
          options[opt[:offs]] = opt[offs+1:]
        else:
          options[opt] = True
      return options

    if self.run_switch.Get():
      return options

    return None

  def Generate(self, ast, options):
    self.errors = 0

    rangestr = GetOption('range')
    releasestr = GetOption('release')

    print "Found releases: %s" % ast.releases

    # Generate list of files to ignore due to errors
    for filenode in ast.GetListOf('File'):
      # If this file has errors, skip it
      if filenode.GetProperty('ERRORS') > 0:
        self.skip_list.append(filenode)
        continue

    # Check for a range option which over-rides a release option
    if not releasestr and rangestr:
      range_list = rangestr.split(',')
      if len(range_list) != 2:
        self.Error('Failed to generate for %s, incorrect range: "%s"' %
                   (self.name, rangestr))
      else:
        vmin = range_list[0]
        vmax = range_list[1]

        # Generate 'start' and 'end' represent first and last found.
        if vmin == 'start':
          vmin = ast.releases[0]
        if vmax == 'end':
          vmax = ast.releases[-1]

        vmin = ast.releases.index(vmin)
        vmax = ast.releases.index(vmax) + 1
        releases = ast.releases[vmin:vmax]

        InfoOut.Log('Generate range %s of %s.' % (rangestr, self.name))
        ret = self.GenerateRange(ast, releases, options)
        if ret < 0:
          self.Error('Failed to generate range %s : %s.' % (vmin, vmax))
        else:
          InfoOut.Log('%s wrote %d files.' % (self.name, ret))
    # Otherwise this should be a single release generation
    else:
      if releasestr == 'start':
        releasestr = ast.releases[0]
      if releasestr == 'end':
        releasestr = ast.releases[-1]

      if releasestr > ast.releases[-1]:
        InfoOut.Log('There is no unique release for %s, using last release.' %
                    releasestr)
        releasestr = ast.releases[-1]

      if releasestr not in ast.releases:
        self.Error('Release %s not in [%s].' %
                   (releasestr, ', '.join(ast.releases)))

      if releasestr:
        InfoOut.Log('Generate release %s of %s.' % (releasestr, self.name))
        ret = self.GenerateRelease(ast, releasestr, options)
        if ret < 0:
          self.Error('Failed to generate release %s.' % releasestr)
        else:
          InfoOut.Log('%s wrote %d files.' % (self.name, ret))
      else:
        self.Error('No range or release specified for %s.' % releasestr)

    return self.errors

  def GenerateRelease(self, ast, release, options):
    __pychecker__ = 'unusednames=ast,release,options'
    self.Error("Undefined release generator.")
    return 0

  def GenerateRange(self, ast, releases, options):
    __pychecker__ = 'unusednames=ast,releases,options'
    self.Error("Undefined range generator.")
    return 0

  @staticmethod
  def Run(ast):
    fail_count = 0

    # Check all registered generators if they should run.
    for gen in GeneratorList:
      options = gen.GetRunOptions()
      if options is not None:
        if gen.Generate(ast, options):
          fail_count += 1

    return fail_count
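
# A minimal concrete generator, shown for illustration (hypothetical; not part
# of the original file). Instantiating a subclass like this at module level is
# what registers it with Generator.Run() via GeneratorList.
class CountingGenerator(Generator):
  def GenerateRelease(self, ast, release, options):
    # Return the number of files written; a negative value signals failure.
    return 0

  def GenerateRange(self, ast, releases, options):
    return 0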
class GeneratorByFile(Generator):
  """A simplified generator that generates one output file per IDL source file.

  A subclass of Generator for use of generators which have a one to one
  mapping between IDL sources and output files.

  Derived classes should define GenerateFile.
  """

  def GenerateFile(self, filenode, releases, options):
    """Generates an output file from the IDL source.

    Returns true if the generated file is different than the previously
    generated file.
    """
    __pychecker__ = 'unusednames=filenode,releases,options'
    self.Error("Undefined release generator.")
    return 0

  def GenerateRelease(self, ast, release, options):
    return self.GenerateRange(ast, [release], options)

  def GenerateRange(self, ast, releases, options):
    # Get list of out files
    outlist = GetOption('out')
    if outlist: outlist = outlist.split(',')

    skipList = []
    cnt = 0
    for filenode in ast.GetListOf('File'):
      # Ignore files with errors
      if filenode in self.skip_list:
        continue

      # Skip this file if not required
      if outlist and filenode.GetName() not in outlist:
        continue

      # Create the output file and increment out count if there was a delta
      if self.GenerateFile(filenode, releases, options):
        cnt = cnt + 1

    for filenode in skipList:
      errcnt = filenode.GetProperty('ERRORS')
      ErrOut.Log('%s : Skipped because of %d errors.' % (
          filenode.GetName(), errcnt))

    if skipList:
      return -len(skipList)

    if GetOption('diff'):
      return -cnt
    return cnt
check_release = 0
check_range = 0
class GeneratorReleaseTest(Generator):
  def GenerateRelease(self, ast, release, options={}):
    __pychecker__ = 'unusednames=ast,release,options'
    global check_release
    check_map = {
        'so_long': True,
        'MyOpt': 'XYZ',
        'goodbye': True
    }

    check_release = 1
    for item in check_map:
      check_item = check_map[item]
      option_item = options.get(item, None)
      if check_item != option_item:
        print 'Option %s is %s, expecting %s' % (item, option_item, check_item)
        check_release = 0

    if release != 'M14':
      check_release = 0

    return check_release == 1

  def GenerateRange(self, ast, releases, options):
    __pychecker__ = 'unusednames=ast,releases,options'
    global check_range
    check_range = 1
    return True


def Test():
  __pychecker__ = 'unusednames=args'
  global check_release
  global check_range

  ParseOptions(['--testgen_opt=so_long,MyOpt=XYZ,goodbye'])
  if Generator.Run('AST') != 0:
    print 'Generate release: Failed.\n'
    return -1

  if check_release != 1 or check_range != 0:
    print 'Generate release: Failed to run.\n'
    return -1

  check_release = 0
  ParseOptions(['--testgen_opt="HELLO"', '--range=M14,M16'])
  if Generator.Run('AST') != 0:
    print 'Generate range: Failed.\n'
    return -1

  if check_release != 0 or check_range != 1:
    print 'Generate range: Failed to run.\n'
    return -1

  print 'Generator test: Pass'
  return 0


def Main(args):
  if not args: return Test()

  filenames = ParseOptions(args)
  ast = ParseFiles(filenames)
  return Generator.Run(ast)


if __name__ == '__main__':
  GeneratorReleaseTest('Test Gen', 'testgen', 'Generator Class Test.')
  sys.exit(Main(sys.argv[1:]))
| bsd-3-clause | -7,713,490,069,690,885,000 | 29.314079 | 79 | 0.636894 | false |
ehashman/oh-mainline | vendor/packages/Pygments/pygments/lexers/special.py | 270 | 3080 | # -*- coding: utf-8 -*-
"""
pygments.lexers.special
~~~~~~~~~~~~~~~~~~~~~~~
Special lexers.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import cStringIO
from pygments.lexer import Lexer
from pygments.token import Token, Error, Text
from pygments.util import get_choice_opt, b
__all__ = ['TextLexer', 'RawTokenLexer']
class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']

    def get_tokens_unprocessed(self, text):
        yield 0, Text, text


_ttype_cache = {}

line_re = re.compile(b('.*?\n'))


class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`.  This
    lexer raises exceptions during parsing if the token stream in the
    file is malformed.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = ['raw']
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        if isinstance(text, unicode):
            # raw token stream never has any non-ASCII characters
            text = text.encode('ascii')
        if self.compress == 'gz':
            import gzip
            gzipfile = gzip.GzipFile('', 'rb', 9, cStringIO.StringIO(text))
            text = gzipfile.read()
        elif self.compress == 'bz2':
            import bz2
            text = bz2.decompress(text)

        # do not call Lexer.get_tokens() because we do not want Unicode
        # decoding to occur, and stripping is not optional.
        text = text.strip(b('\n')) + b('\n')
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().split(b('\t'), 1)
            except ValueError:
                val = match.group().decode(self.encoding)
                ttype = Error
            else:
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    ttype = Token
                    ttypes = ttypestr.split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype

                val = val[2:-2].decode('unicode-escape')

            yield length, ttype, val
            length += len(val)
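
def _demo_text_lexer():
    # A usage sketch (not part of the original module): TextLexer emits the
    # whole input as a single Text token.
    lexer = TextLexer()
    return list(lexer.get_tokens(u'no highlighting here\n'))
    # -> [(Token.Text, u'no highlighting here\n')]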
| agpl-3.0 | 3,412,416,267,934,151,000 | 29.8 | 75 | 0.542857 | false |
ngoclinh84phys/QE_finite_field | dev-tools/src-normal.py | 20 | 6001 | #!/usr/bin/env python
# (C) 2010 Norbert Nemec
#
# USAGE: src-normal.py < input.f90 > output.f90
#
# Script to normalize Fortran source code:
# a) expand tabs to spaces (tab width 8 characters
# b) remove trailing space
# c) normalize multiword keywords
# d) normalize capitalization of keywords and intrinsics
# d) replace old relational operators (.eq., .gt., etc.) by new ones (==, >, etc.)
# The script skips comments and strings within the code
import sys,re
dropspace_list = [
    "BLOCK *DATA",
    "CASE *DEFAULT", # SPLIT NOT OPTIONAL !
    "DOUBLE *PRECISION",
    "DO *WHILE", # SPLIT NOT OPTIONAL !
    "ELSE *IF",
    "END *BLOCK *DATA",
    "END *DO",
    "END *FILE",
    "END *FORALL",
    "END *FUNCTION",
    "END *IF",
    "END *INTERFACE",
    "END *MODULE",
    "END *PROGRAM",
    "END *SELECT",
    "END *SUBROUTINE",
    "END *TYPE",
    "END *WHERE",
    "GO *TO",
    "IN *OUT",
    "MODULE *PROCEDURE", # SPLIT NOT OPTIONAL !
    "SELECT *CASE",
]

splitword_list = [
    "BLOCK DATA",
    "CASE DEFAULT", # SPLIT NOT OPTIONAL
    "DOUBLE PRECISION",
    "DO WHILE", # SPLIT NOT OPTIONAL
    # "ELSEIF", # leave as one word
    "END BLOCK DATA",
    # "ENDDO", # leave as one word
    "END FILE",
    "END FORALL",
    "END FUNCTION",
    # "ENDIF", # leave as one word
    "END INTERFACE",
    "END MODULE",
    "END PROGRAM",
    "END SELECT",
    "END SUBROUTINE",
    "END TYPE",
    "END WHERE",
    # "GOTO", # leave as one word
    # "INOUT", # leave as one word
    "MODULE PROCEDURE", # SPLIT NOT OPTIONAL
    "SELECT CASE",
]

dropspace_re = re.compile(r"\b("+"|".join(dropspace_list)+r")\b",re.I)
def dropspace_fn(s):
    return s.group(0).replace(" ","")

splitword_dict = dict( (a.replace(" ","").lower(),a) for a in splitword_list )
splitword_re = re.compile(r"\b("+"|".join(splitword_list).replace(" ","")+r")\b",re.I)
def splitword_fn(s):
    return splitword_dict[s.group(0).lower()]

uppercase_keywords = r"""
MODULE SUBROUTINE PROGRAM FUNCTION INTERFACE
ENDMODULE ENDSUBROUTINE ENDPROGRAM ENDFUNCTION ENDINTERFACE
BLOCKDATA DOUBLEPRECISION
MODULEPROCEDURE
TYPE ENDTYPE
CONTAINS
USE ONLY
ALLOCATABLE DIMENSION INTENT EXTERNAL INTRINSIC OPTIONAL PARAMETER POINTER
COMMON
FORMAT
IMPLICIT NONE
PRIVATE PUBLIC
CHARACTER COMPLEX INTEGER LOGICAL
ENTRY EQUIVALENCE INCLUDE NAMELIST SAVE SEQUENCE TARGET
ELEMENTAL PURE RECURSIVE RESULT
SELECTCASE CASE CASEDEFAULT ENDSELECT
IF THEN ELSEIF ELSE ENDIF
WHERE ELSEWHERE ENDWHERE
FORALL ENDFORALL
DO DOWHILE ENDDO
ALLOCATE ASSIGN BACKSPACE CALL CLOSE CONTINUE CYCLE DEALLOCATE ENDFILE
EXIT FORMAT GOTO INQUIRE NULLIFY OPEN PAUSE PRINT READ RETURN REWIND STOP WRITE
""".split()

lowercase_keywords = r"""
in inout out
""".split()

intrinsics = r"""
abort abs achar acos acosd acosh adjustl adjustr aimag aint all allocated and anint any asin
asind asinh associated atan atan2 atan2d atand atanh
baddress bit_size btest
ceiling char cmplx conjg cos cosd cosh count cshift
date date_and_time dble dcmplx dfloat digits dim dnum dot_product dprod dreal
eoshift epsilon exit exp exponent
floor flush fnum fraction free fset fstream
getarg getenv gran
hfix huge
iachar iaddr iand iargc ibclr ibits ibset ichar idate idim ieor igetarg ijint imag index int int1
int2 int4 int8 inum iomsg ior iqint irand iranp ishft ishftc isign ixor izext
jnum jzext
kind kzext
lbound len len_trim lge lgt lle llt loc log log10 lshft lshift
malloc matmul max maxexponent maxloc maxval mclock merge min minexponent minloc minval mod modulo mvbits
nearest nint not
or
pack precision present product
qext qfloat qnum qprod
radix ran rand random_number random_seed range repeat reshape rnum rrspacing rshft rshift
scale scan secnds selected_int_kind selected_real_kind set_exponent shape sign sin sind sinh size
sizeof spacing spread sqrt srand sum system system_clock
tan tand tanh time tiny transfer transpose trim
ubound unpack
verify xor zext
""".split()

ignore_for_the_moment = r"""
real REAL isnan
"""

special_keywords = r"""
.and. .or. .not. .true. .false. .eqv. .neqv.
.eq. .ge. .gt. .le. .lt. .ne.
""".replace(".","\\.").split()

def uppercase_fn(s):
    return s.group(0).upper()

def lowercase_fn(s):
    return s.group(0).lower()

def special_fn(s):
    res = s.group(0).lower()
    res = {
        '.eq.': '==',
        '.ge.': '>=',
        '.gt.': '>',
        '.le.': '<=',
        '.lt.': '<',
        '.ne.': '/=',
    }.get(res,res)
    return res

uppercase_re = re.compile(r"\b("+"|".join(uppercase_keywords)+r")\b",re.I)
lowercase_re = re.compile(r"\b("+"|".join(lowercase_keywords+intrinsics)+r")\b",re.I)
special_re = re.compile(r"("+"|".join(special_keywords)+r")",re.I)

def correctcase(line):
    line = dropspace_re.sub(dropspace_fn,line)
    line = uppercase_re.sub(uppercase_fn,line)
    line = lowercase_re.sub(lowercase_fn,line)
    line = special_re.sub(special_fn,line)
    line = splitword_re.sub(splitword_fn,line)
    return line
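
def demo_correctcase():
    # Worked examples of the normalization above (illustrative only):
    #   'end if (x.eq.1) go to 10'  ->  'ENDIF (x == 1) GOTO 10'
    #   'double precision x'        ->  'DOUBLE PRECISION x'
    return correctcase('end if (x.eq.1) go to 10')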
##############

quote = " "
QUOTES = "'\""

for lin in sys.stdin:
    lin = lin.rstrip().expandtabs()
    pos = 0
    lout = ""
    if lin[:1] == "#":
        lout = lin
        pos = len(lin)
    while pos < len(lin):
        if quote in QUOTES:
            npos = lin.find(quote,pos)
            if npos >= 0:
                assert lin[npos] == quote
                lout += lin[pos:npos+1]
                pos = npos+1
                quote = " "
            elif lin[-1] == "&":
                lout += lin[pos:]
                break
            else:
                raise "unterminated string in line ["+lin+"]"
        cpos = lin.find("!",pos) % (len(lin)+1)
        qpos = lin.find("'",pos) % (len(lin)+1)
        dpos = lin.find('"',pos) % (len(lin)+1)
        npos = min(cpos,qpos,dpos)
        lout += correctcase(lin[pos:npos])
        pos = npos
        if pos == len(lin):
            break
        elif lin[pos] == "!":
            lout += lin[pos:]
            break
        elif lin[pos] in QUOTES:
            quote = lin[pos]
            lout += quote
            pos += 1
            continue
        else:
            raise "Strange internal error"
    sys.stdout.write(lout+"\n")
| gpl-2.0 | 6,180,127,024,157,767,000 | 25.790179 | 104 | 0.64256 | false |
Fireblend/chromium-crosswalk | tools/perf/page_sets/key_mobile_sites_repaint.py | 5 | 7857 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
from page_sets import key_mobile_sites_pages
from page_sets import repaint_helpers
def _CreatePageClassWithRepaintInteractions(page_cls, mode, height, width):
  class DerivedRepaintPage(page_cls):  # pylint: disable=W0232

    def RunPageInteractions(self, action_runner):
      repaint_helpers.Repaint(
          action_runner, mode=mode, width=width, height=height)

  return DerivedRepaintPage
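
# The factory above returns a new page class whose RunPageInteractions closes
# over the repaint parameters; for illustration (hypothetical values):
#   FacebookRepaint = _CreatePageClassWithRepaintInteractions(
#       key_mobile_sites_pages.FacebookPage, mode='viewport',
#       height=None, width=None)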
class KeyMobileSitesRepaintPage(page_module.Page):

  def __init__(self, url, page_set, mode, height, width, name='', labels=None):
    super(KeyMobileSitesRepaintPage, self).__init__(
        url=url, page_set=page_set, name=name,
        credentials_path='data/credentials.json', labels=labels)
    self.user_agent_type = 'mobile'
    self.archive_data_file = 'data/key_mobile_sites_repaint.json'
    self._mode = mode
    self._width = width
    self._height = height

  def RunPageInteractions(self, action_runner):
    repaint_helpers.Repaint(
        action_runner, mode=self._mode, width=self._width, height=self._height)
class KeyMobileSitesRepaintPageSet(page_set_module.PageSet):
""" Key mobile sites with repaint interactions. """
def __init__(self, mode='viewport', width=None, height=None):
super(KeyMobileSitesRepaintPageSet, self).__init__(
user_agent_type='mobile',
archive_data_file='data/key_mobile_sites_repaint.json',
bucket=page_set_module.PARTNER_BUCKET)
# Add pages with predefined classes that contain custom navigation logic.
predefined_page_classes = [
key_mobile_sites_pages.CapitolVolkswagenPage,
key_mobile_sites_pages.TheVergeArticlePage,
key_mobile_sites_pages.CnnArticlePage,
key_mobile_sites_pages.FacebookPage,
key_mobile_sites_pages.YoutubeMobilePage,
key_mobile_sites_pages.LinkedInPage,
key_mobile_sites_pages.YahooAnswersPage,
key_mobile_sites_pages.GmailPage,
key_mobile_sites_pages.GoogleNewsMobilePage,
key_mobile_sites_pages.GoogleNewsMobile2Page,
key_mobile_sites_pages.AmazonNicolasCagePage,
# Page behaves non-deterministically, replaced with test version for now.
# key_mobile_sites_pages.GroupClonedPage,
# mean_input_event_latency cannot be tracked correctly for
# GroupClonedListImagesPage. See crbug.com/409086.
# key_mobile_sites_pages.GroupClonedListImagesPage,
]
for page_class in predefined_page_classes:
self.AddUserStory(
_CreatePageClassWithRepaintInteractions(
page_class, mode=mode, height=height, width=width)(self))
# Add pages with custom labels.
# Why: Top news site.
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://nytimes.com/', page_set=self, labels=['fastpath'],
mode=mode, height=height, width=width))
# Why: Image-heavy site.
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://cuteoverload.com', page_set=self, labels=['fastpath'],
mode=mode, height=height, width=width))
# Why: #11 (Alexa global), google property; some blogger layouts
# have infinite scroll but more interesting.
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://googlewebmastercentral.blogspot.com/',
page_set=self, name='Blogger', mode=mode, height=height, width=width))
    # Why: #18 (Alexa global), picked an interesting post
self.AddUserStory(KeyMobileSitesRepaintPage(
# pylint: disable=line-too-long
url='http://en.blog.wordpress.com/2012/09/04/freshly-pressed-editors-picks-for-august-2012/',
page_set=self,
name='Wordpress', mode=mode, height=height, width=width))
# Why: #6 (Alexa) most visited worldwide, picked an interesting page
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://en.wikipedia.org/wiki/Wikipedia',
page_set=self,
name='Wikipedia (1 tab)', mode=mode, height=height, width=width))
# Why: #8 (Alexa global), picked an interesting page
# Forbidden (Rate Limit Exceeded)
# self.AddUserStory(KeyMobileSitesRepaintPage(
# url='http://twitter.com/katyperry', page_set=self, name='Twitter'))
    # Why: #37 (Alexa global)
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://pinterest.com',
page_set=self,
name='Pinterest', mode=mode, height=height, width=width))
# Why: #1 sports.
# Fails often; crbug.com/249722'
# self.AddUserStory(KeyMobileSitesRepaintPage(
# url='http://espn.go.com', page_set=self, name='ESPN'))
# Why: crbug.com/231413
# Doesn't scroll; crbug.com/249736
# self.AddUserStory(KeyMobileSitesRepaintPage(
# url='http://forecast.io', page_set=self))
# Why: crbug.com/169827
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://slashdot.org/', page_set=self, labels=['fastpath'],
mode=mode, width=width, height=height))
    # Why: #5 Alexa news
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://www.reddit.com/r/programming/comments/1g96ve',
page_set=self, labels=['fastpath'],
mode=mode, width=width, height=height))
    # Why: Problematic use of fixed position elements
self.AddUserStory(KeyMobileSitesRepaintPage(
url='http://www.boingboing.net', page_set=self, labels=['fastpath'],
mode=mode, width=width, height=height))
# Add simple pages with no custom navigation logic or labels.
urls_list = [
# Why: Social; top Google property; Public profile; infinite scrolls.
# pylint: disable=line-too-long
'https://plus.google.com/app/basic/110031535020051778989/posts?source=apppromo',
# Why: crbug.com/242544
('http://www.androidpolice.com/2012/10/03/rumor-evidence-mounts-that-an-'
'lg-optimus-g-nexus-is-coming-along-with-a-nexus-phone-certification-'
'program/'),
# Why: crbug.com/149958
'http://gsp.ro',
# Why: Top tech blog
'http://theverge.com',
# Why: Top tech site
'http://digg.com',
# Why: Top Google property; a Google tab is often open
'https://www.google.com/#hl=en&q=barack+obama',
# Why: #1 news worldwide (Alexa global)
'http://news.yahoo.com',
# Why: #2 news worldwide
'http://www.cnn.com',
# Why: #1 commerce website by time spent by users in US
'http://shop.mobileweb.ebay.com/searchresults?kw=viking+helmet',
# Why: #1 Alexa recreation
# pylint: disable=line-too-long
'http://www.booking.com/searchresults.html?src=searchresults&latitude=65.0500&longitude=25.4667',
# Why: #1 Alexa sports
'http://sports.yahoo.com/',
# Why: Top tech blog
'http://techcrunch.com',
# Why: #6 Alexa sports
'http://mlb.com/',
# Why: #14 Alexa California
'http://www.sfgate.com/',
# Why: Non-latin character set
'http://worldjournal.com/',
# Why: Mobile wiki
'http://www.wowwiki.com/World_of_Warcraft:_Mists_of_Pandaria',
# Why: #15 Alexa news
'http://online.wsj.com/home-page',
# Why: Image-heavy mobile site
'http://www.deviantart.com/',
# Why: Top search engine
('http://www.baidu.com/s?wd=barack+obama&rsv_bp=0&rsv_spt=3&rsv_sug3=9&'
'rsv_sug=0&rsv_sug4=3824&rsv_sug1=3&inputT=4920'),
# Why: Top search engine
'http://www.bing.com/search?q=sloths',
# Why: Good example of poor initial scrolling
'http://ftw.usatoday.com/2014/05/spelling-bee-rules-shenanigans'
]
for url in urls_list:
self.AddUserStory(KeyMobileSitesRepaintPage(
url, self, mode=mode, height=height, width=width))
| bsd-3-clause | -3,172,912,236,625,451,500 | 39.292308 | 103 | 0.678503 | false |
fspaolo/scikit-learn | examples/cluster/plot_kmeans_digits.py | 8 | 4495 | """
===========================================================
A demo of K-Means clustering on the handwritten digits data
===========================================================
In this example we compare the various initialization strategies for
K-means in terms of runtime and quality of the results.
As the ground truth is known here, we also apply different cluster
quality metrics to judge the goodness of fit of the cluster labels to the
ground truth.
Cluster quality metrics evaluated (see :ref:`clustering_evaluation` for
definitions and discussions of the metrics):
=========== ========================================================
Shorthand full name
=========== ========================================================
homo homogeneity score
compl completeness score
v-meas V measure
ARI adjusted Rand index
AMI adjusted mutual information
silhouette silhouette coefficient
=========== ========================================================
"""
print(__doc__)
from time import time
import numpy as np
import pylab as pl
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
np.random.seed(42)
digits = load_digits()
data = scale(digits.data)
n_samples, n_features = data.shape
n_digits = len(np.unique(digits.target))
labels = digits.target
sample_size = 300
print("n_digits: %d, \t n_samples %d, \t n_features %d"
% (n_digits, n_samples, n_features))
print(79 * '_')
print('% 9s' % 'init'
' time inertia homo compl v-meas ARI AMI silhouette')
def bench_k_means(estimator, name, data):
t0 = time()
estimator.fit(data)
print('% 9s %.2fs %i %.3f %.3f %.3f %.3f %.3f %.3f'
% (name, (time() - t0), estimator.inertia_,
metrics.homogeneity_score(labels, estimator.labels_),
metrics.completeness_score(labels, estimator.labels_),
metrics.v_measure_score(labels, estimator.labels_),
metrics.adjusted_rand_score(labels, estimator.labels_),
metrics.adjusted_mutual_info_score(labels, estimator.labels_),
metrics.silhouette_score(data, estimator.labels_,
metric='euclidean',
sample_size=sample_size)))
bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10),
name="k-means++", data=data)
bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10),
name="random", data=data)
# in this case the seeding of the centers is deterministic, hence we run the
# kmeans algorithm only once with n_init=1
pca = PCA(n_components=n_digits).fit(data)
bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1),
name="PCA-based",
data=data)
print(79 * '_')
###############################################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10)
kmeans.fit(reduced_data)
# Step size of the mesh. Decrease to increase the quality of the VQ.
h = .02     # point in the mesh [x_min, x_max]x[y_min, y_max].
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh.
x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
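# np.c_[xx.ravel(), yy.ravel()] flattens the grid into an (n_points, 2)
# array so that every mesh point can be classified; Z is reshaped back to
# the grid shape below before plotting.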
# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
pl.figure(1)
pl.clf()
pl.imshow(Z, interpolation='nearest',
extent=(xx.min(), xx.max(), yy.min(), yy.max()),
cmap=pl.cm.Paired,
aspect='auto', origin='lower')
pl.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
pl.scatter(centroids[:, 0], centroids[:, 1],
marker='x', s=169, linewidths=3,
color='w', zorder=10)
pl.title('K-means clustering on the digits dataset (PCA-reduced data)\n'
'Centroids are marked with white cross')
pl.xlim(x_min, x_max)
pl.ylim(y_min, y_max)
pl.xticks(())
pl.yticks(())
pl.show()
| bsd-3-clause | 9,016,195,088,582,753,000 | 34.393701 | 79 | 0.594883 | false |
wmbutler/courtlistener | alert/corpus_importer/lawbox/generate_judge_stats.py | 2 | 4545 | import datetime
import pickle
import pprint
import argparse
import time
from alert.corpus_importer.lawbox.import_law_box import (
get_judge, get_court_object, get_html_from_raw_text,
)
from alert.search.models import Court
DEBUG = 4
##########################################
# This variable is used to do statistical work on Opinions whose jurisdiction is unclear. The problem is that
# many Opinions, probably thousands of them, have a court like, "D. Wisconsin." Well, Wisconsin has an east and
# west district, but no generic district, so this has to be resolved. When we hit such a case, we set it aside
# for later processing, once we've processed all the easy cases. At that point, we will have the variable below,
# judge stats, which will have all of the judges along with a count of their jurisdictions:
# judge_stats = {
# 'McKensey': {
# 'wied': 998,
# 'wis': 2
# }
# }
# So in this case, you can see quite clearly that McKensey is a judge at wied, and we can classify the case as
# such.
##########################################
try:
with open('judge_stats.pkl', 'rb') as fix_file:
judge_stats = pickle.load(fix_file)
except (IOError, EOFError):
judge_stats = {}
all_courts = Court.objects.all()
def get_judge_and_court(case_path):
raw_text = open(case_path).read()
clean_html_tree, complete_html_tree, clean_html_str, body_text = get_html_from_raw_text(raw_text)
judge = get_judge(clean_html_tree, case_path)
court = get_court_object(clean_html_tree, case_path=case_path)
if judge in judge_stats:
if court in judge_stats[judge]:
judge_stats[judge][court] += 1
else:
judge_stats[judge][court] = 1
else:
judge_stats[judge] = {court: 1}
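# A sketch of how the tallies gathered above can later resolve an
# ambiguous court (names below are illustrative):
#   counts = judge_stats.get('McKensey', {})
#   if counts:
#       likely_court = max(counts, key=counts.get)  # e.g. 'wied'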
def main():
parser = argparse.ArgumentParser(description='Import the corpus provided by lawbox')
parser.add_argument('-f', '--file', type=str, default="index.txt", required=False, dest="file_name",
help="The file that has all the URLs to import, one per line.")
parser.add_argument('-l', '--line', type=int, default=1, required=False,
help='If provided, this will be the line number in the index file where we resume processing.')
parser.add_argument('-r', '--resume', default=False, required=False, action='store_true',
help='Use the saved marker to resume operation where it last failed.')
args = parser.parse_args()
def case_generator(line_number):
"""Yield cases from the index file."""
index_file = open(args.file_name)
for i, line in enumerate(index_file):
if i > line_number:
yield line.strip()
if args.resume:
with open('lawbox_progress_marker_judge_stat_generator.txt') as marker:
resume_point = int(marker.read().strip())
cases = case_generator(resume_point)
i = resume_point
else:
cases = case_generator(args.line)
i = args.line
t1 = time.time()
timings = []
for case_path in cases:
if i % 1000 == 1:
t1 = time.time()
if DEBUG >= 2 and i % 1000 == 0:
t2 = time.time()
timings.append(t2 - t1)
average_per_s = 1000 / (sum(timings) / float(len(timings)))
print "\nCompleted 1000 cases in %0.1f seconds (average: %0.1f/s, %0.1f/m, %0.1f/h)" % \
((t2 - t1), average_per_s, average_per_s * 60, average_per_s * 60 * 60)
print "%s: Doing case (%s): file://%s" % (datetime.datetime.now(), i, case_path)
try:
doc = get_judge_and_court(case_path)
i += 1
except:
print "Last case was number %s: %s" % (i, case_path)
with open('lawbox_progress_marker_judge_stat_generator.txt', 'w') as marker:
marker.write(str(i))
with open('judge_stats.pkl', 'wb') as fix_file:
pickle.dump(judge_stats, fix_file)
with open('judge_stats.py', 'wb') as stats_file:
pprint.pprint(judge_stats, stream=stats_file, indent=4)
raise
with open('lawbox_progress_marker_judge_stat_generator.txt', 'w') as marker:
marker.write(str(i))
with open('judge_stats.pkl', 'wb') as fix_file:
pickle.dump(judge_stats, fix_file)
with open('judge_stats.py', 'wb') as stats_file:
pprint.pprint(judge_stats, stream=stats_file, indent=4)
if __name__ == '__main__':
main()
| agpl-3.0 | 3,346,779,324,005,375,500 | 38.868421 | 119 | 0.5967 | false |
proxysh/Safejumper-for-Mac | buildlinux/env64/lib/python2.7/site-packages/twisted/python/shortcut.py | 12 | 2441 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Creation of Windows shortcuts.
Requires win32all.
"""
from win32com.shell import shell
import pythoncom
import os
def open(filename):
"""Open an existing shortcut for reading.
@return: The shortcut object
@rtype: Shortcut
"""
sc=Shortcut()
sc.load(filename)
return sc
class Shortcut:
"""A shortcut on Win32.
>>> sc=Shortcut(path, arguments, description, workingdir, iconpath, iconidx)
@param path: Location of the target
@param arguments: If path points to an executable, optional arguments to
pass
@param description: Human-readable description of target
@param workingdir: Directory from which target is launched
@param iconpath: Filename that contains an icon for the shortcut
@param iconidx: If iconpath is set, optional index of the icon desired
"""
def __init__(self,
path=None,
arguments=None,
description=None,
workingdir=None,
iconpath=None,
iconidx=0):
self._base = pythoncom.CoCreateInstance(
shell.CLSID_ShellLink, None,
pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
)
data = map(None,
['"%s"' % os.path.abspath(path), arguments, description,
os.path.abspath(workingdir), os.path.abspath(iconpath)],
("SetPath", "SetArguments", "SetDescription",
"SetWorkingDirectory") )
for value, function in data:
if value and function:
# call function on each non-null value
getattr(self, function)(value)
if iconpath:
self.SetIconLocation(iconpath, iconidx)
def load( self, filename ):
"""Read a shortcut file from disk."""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(filename)
def save( self, filename ):
"""Write the shortcut to disk.
The file should be named something.lnk.
"""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(filename, 0)
def __getattr__( self, name ):
if name != "_base":
return getattr(self._base, name)
raise AttributeError("%s instance has no attribute %s" % (
self.__class__.__name__, name))
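# A minimal usage sketch (paths are illustrative; requires win32all):
#   sc = Shortcut(path=r"C:\Python27\python.exe",
#                 arguments="-i",
#                 description="Interactive Python",
#                 workingdir=r"C:\Python27")
#   sc.save(r"C:\Users\me\Desktop\python.lnk")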
| gpl-2.0 | 6,832,442,379,185,030,000 | 31.118421 | 80 | 0.598116 | false |
skapfer/rubber | src/tex.py | 1 | 14284 | # This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2008
# vim: noet:ts=4
"""
General-purpose classes for reading TeX code.
Classes and functions from this module can be used without Rubber.
"""
import re
from io import StringIO
# The catcodes
EOF = -2
CSEQ = -1
ESCAPE = 0
OPEN = 1
CLOSE = 2
MATH = 3
ALIGN = 4
END_LINE = 5
ARGUMENT = 6
SUPER = 7
SUB = 8
IGNORE = 9
SPACE = 10
LETTER = 11
OTHER = 12
ACTIVE = 13
COMMENT = 14
INVALID = 15
cat_names = {
-2: 'EOF',
-1: 'CSEQ',
0: 'ESCAPE',
1: 'OPEN',
2: 'CLOSE',
3: 'MATH',
4: 'ALIGN',
5: 'END_LINE',
6: 'ARGUMENT',
7: 'SUPER',
8: 'SUB',
9: 'IGNORE',
10: 'SPACE',
11: 'LETTER',
12: 'OTHER',
13: 'ACTIVE',
14: 'COMMENT',
15: 'INVALID'
}
# The default categories
catcodes = {
'\\' : ESCAPE,
'{' : OPEN,
'}' : CLOSE,
'$' : MATH,
'&' : ALIGN,
'\n' : END_LINE,
'#' : ARGUMENT,
'^' : SUPER,
'_' : SUB,
'\000' : IGNORE,
' ' : SPACE, '\t' : SPACE,
'~' : ACTIVE,
'%' : COMMENT,
'\177' : INVALID
}
for i in range(0,26):
catcodes[chr(ord('A')+i)] = LETTER
catcodes[chr(ord('a')+i)] = LETTER
class Position:
"""
A class to represent positions in a source file.
"""
def __init__ (self, file=None, line=None, char=None):
self.file = file
self.line = line
self.char = char
def __str__ (self):
text = ''
if self.file:
			text = self.file
if self.line is not None:
if text != '':
text += ':'
text += '%d' % self.line
if self.char is not None:
text += ':%d' % self.char
return text
class Token:
"""
The class used to represent tokens. Objects contain a catcode, a value
(for control sequences) and the raw text that represents them in the input
file.
"""
def __init__ (self, cat, val=None, raw=None, pos=None):
self.cat = cat
self.val = val
self.raw = raw
self.pos = pos
def __repr__ (self):
text = 'Token(' + cat_names[self.cat]
if self.val is not None:
text += ', ' + repr(self.val)
return text + ')'
class TokenList (list):
"""
This class represents a token list. It behaves as a standard list with
some extra functionality.
"""
def __init__ (self, data=[], pos=None):
super (TokenList, self).__init__(data)
if pos is None and len(data) > 0:
self.pos = data[0].pos
else:
self.pos = pos
def raw_text (self):
"""
Return the textual representation of the token list by concatenating
the raw text of the tokens.
"""
text = ''
for token in self:
text += token.raw
return text
class ParserBase (object):
"""
This is the base class for parsers. It holds state information like
catcodes, handles the push-back buffer, and leaves it to derived classes
to actually read tokens, using the "read_token" method. This class also
provides high-level functionality like getting macro arguments.
"""
def __init__ (self):
self.catcodes = catcodes.copy()
self.next = []
self.math_mode = 0
self.last_is_math = 0
self.pos = None
def catcode (self, char):
"""
Return the catcode of a character.
"""
if char in self.catcodes:
return self.catcodes[char]
else:
return OTHER
def put_token (self, token):
"""
Put back a token in the input.
"""
self.next.append(token)
def put_list (self, list):
"""
Put back a token list in the input.
"""
arg = list[:]
arg.reverse()
self.next.extend(arg)
def peek_token (self):
"""
Return the next token that will be read without updating the state.
"""
if len(self.next) > 0:
return self.next[-1]
token = self.read_token()
self.put_token(token)
return token
def get_token (self):
"""
Get the next token from the input and update the math mode.
"""
if len(self.next) > 0:
token = self.next.pop()
else:
token = self.read_token()
# skip over comment
if token.cat == COMMENT:
assert len(self.next) == 0
assert self.next_char is None
self.read_line()
return self.read_token()
if token.cat == MATH:
if self.last_is_math:
if self.math_mode == 1:
self.math_mode = 2
self.last_is_math = 0
else:
if self.math_mode == 0:
self.math_mode = 1
else:
self.math_mode = 0
self.last_is_math = 1
else:
self.last_is_math = 0
return token
def __iter__ (self):
"""
Return an iterator over all tokens in the input. The EOF token is not
returned by this iterator.
"""
while 1:
token = self.get_token()
if token.cat == EOF:
break
yield token
def skip_space (self):
"""
Skip white space in the input.
"""
while self.peek_token().cat in (SPACE, END_LINE):
self.get_token()
def get_group (self):
"""
Get the list of tokens up to the next closing brace, and drop the
closing brace.
"""
value = TokenList()
level = 1
while 1:
token = self.get_token()
if token.cat == OPEN:
level += 1
elif token.cat == CLOSE:
level -= 1
if level == 0:
break
elif token.cat == EOF:
break
value.append(token)
return value
def get_group_text (self):
"""
Get the list of tokens up to the next closing brace, and drop the
closing brace. Return the list as a string.
"""
value = ""
level = 1
while 1:
token = self.get_token()
if token.cat == OPEN:
level += 1
elif token.cat == CLOSE:
level -= 1
if level == 0:
break
elif token.cat == EOF:
break
value += token.raw
return value
def get_argument (self):
"""
Get a macro argument from the input text. Returns a token list with
the value of the argument, with surrounding braces removed if
relevant.
"""
self.skip_space()
token = self.get_token()
if token.cat == EOF:
return TokenList()
if token.cat != OPEN:
return TokenList(data=[token])
return self.get_group()
def get_argument_text (self):
"""
Get a macro argument from the input text. Returns a string with
the text of the argument, with surrounding braces removed if
relevant.
"""
self.skip_space()
token = self.get_token()
if token.cat == EOF:
return None
if token.cat != OPEN:
return token.raw
return self.get_group_text()
def get_latex_optional (self):
"""
Check if a LaTeX-style optional argument is present. If such an
argument is present, return it as a token list, otherwise return None.
"""
self.skip_space()
next = self.get_token()
if next.cat != OTHER or next.raw != '[':
self.put_token(next)
return None
level = 0
list = TokenList()
while True:
token = self.get_token()
if token.cat == EOF:
break
if token.cat == OTHER and token.raw == ']' and level == 0:
break
if token.cat == OPEN:
level += 1
elif token.cat == CLOSE:
if level == 0:
break
level -= 1
list.append(token)
return list
def get_latex_optional_text (self):
"""
Check if a LaTeX-style optional argument is present. If such an
argument is present, return it as text, otherwise return None.
"""
list = self.get_latex_optional()
if list is None:
return None
return list.raw_text()
def get_latex_star (self):
"""
Check if the command is a starred one. If so, eat the star,
and return True. Otherwise, return False.
"""
nextt = self.peek_token()
if (nextt.cat, nextt.raw) == (OTHER, '*'):
self.get_token()
return True
else:
return False
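# A short sketch of driving a parser over a string (parse_string is
# defined at the end of this module; the input is illustrative):
#   p = parse_string("\\includegraphics[width=3cm]{fig.pdf}")
#   p.get_token()                 # Token(CSEQ, 'includegraphics')
#   p.get_latex_optional_text()   # 'width=3cm'
#   p.get_argument_text()         # 'fig.pdf'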
def re_set (set, complement=False):
"""
Returns a string that contains a regular expression matching a given set
of characters, or its complement if the optional argument is true. The set
must not be empty.
"""
if len(set) == 0:
raise RuntimeError('argument of re_set must not be empty')
if not complement and len(set) == 1:
c = set[0]
if c in '.^$*+?{}\\[]|()':
return '\\' + c
else:
return c
expr = '['
if complement:
expr += '^'
for c in set:
if c in ']-\\':
expr += '\\' + c
else:
expr += c
return expr + ']'
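# For instance, re_set(['{', '}']) yields '[{}]', while
# re_set(['%'], complement=True) yields '[^%]'.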
class Parser (ParserBase):
"""
A parser for TeX code that reads its input from a file object.
The class also provides a hook feature: the method 'set_hooks' declares a
set of control sequence names, and the method 'next_hook' parses the input
until it finds a control sequence from this set, ignoring all other
	tokens. The advantage of this method is that it is much faster than
reading tokens one by one.
"""
def __init__ (self, input):
"""
Initialise the parser with a file as input.
If 'input' is None, then input can only be provided by
the 'put_token' and 'put_list' methods.
"""
super (Parser, self).__init__()
self.input = input
self.line = ""
self.pos_line = 1
self.pos_char = 1
self.next_char = None
def read_line (self):
"""
Reads a line of input and sets the attribute 'line' with it. Returns
True if reading succeeded and False if it failed.
"""
if self.input is None:
return False
self.line = self.input.readline()
if self.line == "":
return False
return True
def read_char (self):
"""
Get the next character from the input and its catcode (without parsing
control sequences).
"""
if self.next_char is not None:
t = self.next_char
self.next_char = None
return t
while self.line == "":
if not self.read_line():
return Token(EOF)
c = self.line[0]
self.line = self.line[1:]
pos = Position(line=self.pos_line, char=self.pos_char)
if c == '\n':
self.pos_line += 1
self.pos_char = 1
else:
self.pos_char += 1
return Token(self.catcode(c), raw=c, pos=pos)
def read_token (self):
"""
Get the next token from the input.
"""
token = self.read_char()
if token.cat != ESCAPE:
if token.cat in (LETTER, OTHER):
token.val = token.raw
return token
pos = token.pos
raw = token.raw
token = self.read_char()
if token.cat != LETTER:
token.cat = CSEQ
token.val = token.raw
token.raw = raw + token.raw
return token
name = ""
while token.cat == LETTER:
raw += token.raw
name += token.raw
token = self.read_char()
while token.cat == SPACE:
raw += token.raw
token = self.read_char()
self.next_char = token
return Token(CSEQ, name, raw, pos=pos)
def re_cat (self, *cat):
"""
		Returns a regular expression that matches characters whose category is
in given list.
"""
return re_set([char for char,code in self.catcodes.items() if code in cat])
def re_nocat (self, *cat):
"""
		Returns a regular expression that matches characters whose category is
not in a given list.
"""
return re_set([char for char,code in self.catcodes.items() if code in cat], True)
def set_hooks (self, names):
"""
Define the set of hooks for 'next_hook'.
"""
expr = '(' \
+ self.re_nocat(ESCAPE, COMMENT) + '|' \
+ self.re_cat(ESCAPE) + self.re_cat(ESCAPE, COMMENT) + ')*' \
+ '(?P<raw>' + self.re_cat(ESCAPE) \
+ '(?P<val>' + '|'.join(names) + ')' \
+ '(' + self.re_cat(SPACE) + '+|(?=' + self.re_nocat(LETTER) + ')|$))'
self.regex = re.compile(expr)
def next_hook (self):
"""
Ignore input until the next control sequence from the set defined by
'set_hooks'. Returns the associated token, or the EOF token if no hook
was found.
"""
while self.line == "":
if not self.read_line():
return Token(EOF)
while True:
match = self.regex.match(self.line)
if match is not None:
self.pos_char = match.end('raw') + 1
self.line = self.line[match.end('raw'):]
return Token(CSEQ, match.group('val'), match.group('raw'))
if not self.read_line():
return Token(EOF)
self.pos_line += 1
self.pos_char = 1
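# Sketch of the hook mechanism (illustrative input):
#   p = parse_string("text \\usepackage{foo} more")
#   p.set_hooks(['usepackage'])
#   p.next_hook()             # Token(CSEQ, 'usepackage')
#   p.get_argument_text()     # 'foo'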
def parse_string (text):
"""
Factory function for parsing TeX code from a string.
"""
return Parser(StringIO(text))
| gpl-2.0 | 2,797,327,243,951,643,000 | 26.682171 | 89 | 0.506511 | false |
iovation/launchkey-python | launchkey/utils/shared.py | 2 | 5725 | """ Shared Utilities """
from functools import wraps
from uuid import UUID
import warnings
import six
from jwkest import JWKESTException
from ..exceptions import InvalidIssuerFormat, InvalidIssuerVersion, \
JWTValidationFailure, InvalidJWTResponse, WebhookAuthorizationError, \
XiovJWTValidationFailure, XiovJWTDecryptionFailure
class XiovJWTService(object):
"""
Handles the x-iov-jwt request spec validation and decryption
"""
def __init__(self, transport, subject):
self._transport = transport
self._subject = subject
def verify_jwt_request(self, body, headers, method, path):
"""
Retrieves and validates an x-iov-jwt payload
:param body: The raw body that was send in the POST content
:param headers: A generic map of response headers. These will be used
to access and validate authorization
:param method: The HTTP method of the request
:param path: The path of the request
:return: utf-8 encoded string of the body
        :raises launchkey.exceptions.XiovJWTValidationFailure: when the
        JWT in the request cannot be parsed or fails validation.
        :raises launchkey.exceptions.WebhookAuthorizationError: when the
        X-IOV-JWT header is missing from the supplied headers.
"""
if not isinstance(body, six.string_types):
body = body.decode("utf-8")
compact_jwt = None
for header_key, header_value in headers.items():
if header_key.lower() == 'x-iov-jwt':
compact_jwt = header_value
if compact_jwt is None:
raise WebhookAuthorizationError(
"The X-IOV-JWT header was not found in the supplied headers "
"from the request!")
try:
self._transport.verify_jwt_request(
compact_jwt,
self._subject,
method,
path,
body)
except (JWTValidationFailure, InvalidJWTResponse) as reason:
raise XiovJWTValidationFailure(reason=reason)
return body
def decrypt_jwe(self, body, headers, method, path):
"""
Verifies and decrypts a jwt request
:param body: The raw body that was send in the POST content
:param headers: A generic map of response headers. These will be used
to access and validate authorization
:param method: The HTTP method of the request
:param path: The path of the request
:raises launchkey.exceptions.UnexpectedKeyID: when the request body is
decrypted using a public key whose private key is not known by the
client. This can be a configuration issue.
:raises launchkey.exceptions.XiovJWTDecryptionFailure: when the request
body cannot be decrypted.
:return: Decrypted string
"""
body = self.verify_jwt_request(body, headers, method, path)
try:
return self._transport.decrypt_response(body)
except JWKESTException as reason:
raise XiovJWTDecryptionFailure(reason)
class UUIDHelper(object):
"""
Validate the provided uuid string and return a UUID if string is a valid
UUID with the correct version or throw InvalidIssuerFormat or
InvalidIssuerVersion when that criteria is not met.
"""
def from_string(self, uuid_value, version=None):
"""
Create a UUID from its string representation
:param uuid_value: The string representation of a UUID
:param version: The version of the UUID
:return: UUID
:raises launchkey.exceptions.InvalidIssuerFormat: when uuid_value
is not a valid UUID format
:raises launchkey.exceptions.InvalidIssuerVersion: when uuid_value
is not the same version as version.
"""
if not isinstance(uuid_value, UUID):
try:
uuid_value = UUID(uuid_value)
except (ValueError, TypeError, AttributeError):
raise InvalidIssuerFormat()
self.validate_version(uuid_value, version)
return uuid_value
@staticmethod
def validate_version(uuid_value, version):
"""
Validate the the provided UUID is the provided version
:param uuid_value: A UUID
:param version: The expected version of the UUID
:return: None
:raises launchkey.exceptions.InvalidIssuerFormat: when uuid_value
is not a UUID
:raises launchkey.exceptions.InvalidIssuerVersion: when uuid_value
is not the same version as version.
"""
if not isinstance(uuid_value, UUID):
raise InvalidIssuerFormat()
if version is not None and int(version) != uuid_value.version:
raise InvalidIssuerVersion()
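# Usage sketch (the UUID below is only an illustrative value):
#   helper = UUIDHelper()
#   issuer_id = helper.from_string(
#       "b07f3b50-86fb-4a32-b94c-bba7f0b28b3b", version=4)
#   # raises InvalidIssuerFormat / InvalidIssuerVersion on bad input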
def iso_format(datetime):
"""
Generates an ISO formatted datetime based on what the LaunchKey API
expects. This is a standard ISO datetime without microseconds.
:param datetime: datetime.datetime object
:return: ISO formatted string IE: 2017-10-03T22:50:15Z
"""
return datetime.strftime("%Y-%m-%dT%H:%M:%SZ") \
if datetime is not None else None
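# e.g. iso_format(datetime.datetime(2017, 10, 3, 22, 50, 15))
#      returns '2017-10-03T22:50:15Z'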
# pylint: disable=invalid-name
def deprecated(fn):
"""
Decorator for issuing warnings
:param fn: Function to be called
:return Any: The expected return of the passed function
"""
@wraps(fn)
def wrapper(*args, **kwargs):
""" Decorator function """
warnings.warn("The %s method has been deprecated and will be removed "
"in the next major release." % fn.__name__,
DeprecationWarning)
return fn(*args, **kwargs)
return wrapper
| mit | -5,356,389,399,389,039,000 | 35.006289 | 79 | 0.644891 | false |
jjmachan/activityPointsApp | activitypoints/lib/python3.5/site-packages/django/core/cache/backends/filebased.py | 73 | 5319 | "File-based cache backend"
import errno
import glob
import hashlib
import io
import os
import random
import tempfile
import time
import zlib
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.core.files.move import file_move_safe
from django.utils.encoding import force_bytes
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
class FileBasedCache(BaseCache):
cache_suffix = '.djcache'
def __init__(self, dir, params):
super(FileBasedCache, self).__init__(params)
self._dir = os.path.abspath(dir)
self._createdir()
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
if self.has_key(key, version):
return False
self.set(key, value, timeout, version)
return True
def get(self, key, default=None, version=None):
fname = self._key_to_file(key, version)
try:
with io.open(fname, 'rb') as f:
if not self._is_expired(f):
return pickle.loads(zlib.decompress(f.read()))
except IOError as e:
if e.errno != errno.ENOENT:
raise
return default
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self._createdir() # Cache dir can be deleted at any time.
fname = self._key_to_file(key, version)
self._cull() # make some room if necessary
fd, tmp_path = tempfile.mkstemp(dir=self._dir)
renamed = False
try:
with io.open(fd, 'wb') as f:
expiry = self.get_backend_timeout(timeout)
f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
file_move_safe(tmp_path, fname, allow_overwrite=True)
renamed = True
finally:
if not renamed:
os.remove(tmp_path)
def delete(self, key, version=None):
self._delete(self._key_to_file(key, version))
def _delete(self, fname):
if not fname.startswith(self._dir) or not os.path.exists(fname):
return
try:
os.remove(fname)
except OSError as e:
# ENOENT can happen if the cache file is removed (by another
# process) after the os.path.exists check.
if e.errno != errno.ENOENT:
raise
def has_key(self, key, version=None):
fname = self._key_to_file(key, version)
if os.path.exists(fname):
with io.open(fname, 'rb') as f:
return not self._is_expired(f)
return False
def _cull(self):
"""
Removes random cache entries if max_entries is reached at a ratio
of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
that the entire cache will be purged.
"""
filelist = self._list_cache_files()
num_entries = len(filelist)
if num_entries < self._max_entries:
return # return early if no culling is required
if self._cull_frequency == 0:
return self.clear() # Clear the cache when CULL_FREQUENCY = 0
# Delete a random selection of entries
filelist = random.sample(filelist,
int(num_entries / self._cull_frequency))
for fname in filelist:
self._delete(fname)
def _createdir(self):
if not os.path.exists(self._dir):
try:
os.makedirs(self._dir, 0o700)
except OSError as e:
if e.errno != errno.EEXIST:
raise EnvironmentError(
"Cache directory '%s' does not exist "
"and could not be created'" % self._dir)
def _key_to_file(self, key, version=None):
"""
Convert a key into a cache file path. Basically this is the
root cache path joined with the md5sum of the key and a suffix.
"""
key = self.make_key(key, version=version)
self.validate_key(key)
return os.path.join(self._dir, ''.join(
[hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix]))
def clear(self):
"""
Remove all the cache files.
"""
if not os.path.exists(self._dir):
return
for fname in self._list_cache_files():
self._delete(fname)
def _is_expired(self, f):
"""
Takes an open cache file and determines if it has expired,
deletes the file if it is has passed its expiry time.
"""
exp = pickle.load(f)
if exp is not None and exp < time.time():
f.close() # On Windows a file has to be closed before deleting
self._delete(f.name)
return True
return False
def _list_cache_files(self):
"""
Get a list of paths to all the cache files. These are all the files
in the root cache dir that end on the cache_suffix.
"""
if not os.path.exists(self._dir):
return []
filelist = [os.path.join(self._dir, fname) for fname
in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
return filelist
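# Typical configuration in a project's settings.py (the directory is
# illustrative and must be writable by the application):
#   CACHES = {
#       'default': {
#           'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
#           'LOCATION': '/var/tmp/django_cache',
#       }
#   }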
| mit | 2,680,916,587,182,116,400 | 33.764706 | 84 | 0.57492 | false |
secondstory/dewpoint | libcloud/providers.py | 1 | 2585 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provider related utilities
"""
from libcloud.types import Provider
DRIVERS = {
Provider.DUMMY:
('libcloud.drivers.dummy', 'DummyNodeDriver'),
Provider.EC2_US_EAST:
('libcloud.drivers.ec2', 'EC2NodeDriver'),
Provider.EC2_EU_WEST:
('libcloud.drivers.ec2', 'EC2EUNodeDriver'),
Provider.EC2_US_WEST:
('libcloud.drivers.ec2', 'EC2USWestNodeDriver'),
Provider.ECP:
('libcloud.drivers.ecp', 'ECPNodeDriver'),
Provider.GOGRID:
('libcloud.drivers.gogrid', 'GoGridNodeDriver'),
Provider.RACKSPACE:
('libcloud.drivers.rackspace', 'RackspaceNodeDriver'),
Provider.SLICEHOST:
('libcloud.drivers.slicehost', 'SlicehostNodeDriver'),
Provider.VPSNET:
('libcloud.drivers.vpsnet', 'VPSNetNodeDriver'),
Provider.LINODE:
('libcloud.drivers.linode', 'LinodeNodeDriver'),
Provider.RIMUHOSTING:
('libcloud.drivers.rimuhosting', 'RimuHostingNodeDriver'),
Provider.VOXEL:
('libcloud.drivers.voxel', 'VoxelNodeDriver'),
Provider.SOFTLAYER:
('libcloud.drivers.softlayer', 'SoftLayerNodeDriver'),
Provider.EUCALYPTUS:
('libcloud.drivers.ec2', 'EucNodeDriver'),
Provider.IBM:
('libcloud.drivers.ibm_sbc', 'IBMNodeDriver'),
Provider.OPENNEBULA:
('libcloud.drivers.opennebula', 'OpenNebulaNodeDriver'),
Provider.DREAMHOST:
('libcloud.drivers.dreamhost', 'DreamhostNodeDriver'),
}
def get_driver(provider):
"""Gets a driver
@param provider: Id of provider to get driver
@type provider: L{libcloud.types.Provider}
"""
if provider in DRIVERS:
mod_name, driver_name = DRIVERS[provider]
_mod = __import__(mod_name, globals(), locals(), [driver_name])
return getattr(_mod, driver_name)
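# Typical use (credentials are illustrative):
#   from libcloud.types import Provider
#   from libcloud.providers import get_driver
#   Driver = get_driver(Provider.RACKSPACE)
#   conn = Driver('my-username', 'my-api-key')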
| apache-2.0 | 7,735,479,051,597,986,000 | 38.166667 | 74 | 0.69323 | false |
bsmr-ansible/ansible-modules-extras | files/blockinfile.py | 7 | 9731 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, 2015 YAEGASHI Takeshi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: blockinfile
author:
- 'YAEGASHI Takeshi (@yaegashi)'
extends_documentation_fragment:
- files
- validate
short_description: Insert/update/remove a text block
surrounded by marker lines.
version_added: '2.0'
description:
- This module will insert/update/remove a block of multi-line text
surrounded by customizable marker lines.
notes:
- This module supports check mode.
- When using 'with_*' loops be aware that if you do not set a unique mark the block will be overwritten on each iteration.
options:
dest:
aliases: [ name, destfile ]
required: true
description:
- The file to modify.
state:
required: false
choices: [ present, absent ]
default: present
description:
- Whether the block should be there or not.
marker:
required: false
default: '# {mark} ANSIBLE MANAGED BLOCK'
description:
- The marker line template.
"{mark}" will be replaced with "BEGIN" or "END".
block:
aliases: [ content ]
required: false
default: ''
description:
- The text to insert inside the marker lines.
If it's missing or an empty string,
the block will be removed as if C(state) were specified to C(absent).
insertafter:
required: false
default: EOF
description:
- If specified, the block will be inserted after the last match of
specified regular expression. A special value is available; C(EOF) for
        inserting the block at the end of the file. If the specified regular
        expression has no matches, C(EOF) will be used instead.
choices: [ 'EOF', '*regex*' ]
insertbefore:
required: false
default: None
description:
- If specified, the block will be inserted before the last match of
specified regular expression. A special value is available; C(BOF) for
inserting the block at the beginning of the file. If specified regular
expresion has no matches, the block will be inserted at the end of the
file.
choices: [ 'BOF', '*regex*' ]
create:
required: false
default: 'no'
choices: [ 'yes', 'no' ]
description:
- Create a new file if it doesn't exist.
backup:
required: false
default: 'no'
choices: [ 'yes', 'no' ]
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
follow:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- 'This flag indicates that filesystem links, if they exist, should be followed.'
version_added: "2.1"
"""
EXAMPLES = r"""
- name: insert/update "Match User" configuation block in /etc/ssh/sshd_config
blockinfile:
dest: /etc/ssh/sshd_config
block: |
Match User ansible-agent
PasswordAuthentication no
- name: insert/update eth0 configuration stanza in /etc/network/interfaces
(it might be better to copy files into /etc/network/interfaces.d/)
blockinfile:
dest: /etc/network/interfaces
block: |
iface eth0 inet static
address 192.168.0.1
netmask 255.255.255.0
- name: insert/update HTML surrounded by custom markers after <body> line
blockinfile:
dest: /var/www/html/index.html
marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->"
insertafter: "<body>"
content: |
<h1>Welcome to {{ansible_hostname}}</h1>
<p>Last updated on {{ansible_date_time.iso8601}}</p>
- name: remove HTML as well as surrounding markers
blockinfile:
dest: /var/www/html/index.html
marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->"
content: ""
- name: Add mappings to /etc/hosts
blockinfile:
dest: /etc/hosts
block: |
{{item.ip}} {{item.name}}
marker: "# {mark} ANSIBLE MANAGED BLOCK {{item.name}}"
with_items:
- { name: host1, ip: 10.10.1.10 }
- { name: host2, ip: 10.10.1.11 }
- { name: host3, ip: 10.10.1.12 }
"""
import re
import os
import tempfile
def write_changes(module, contents, dest):
tmpfd, tmpfile = tempfile.mkstemp()
f = os.fdopen(tmpfd, 'wb')
f.write(contents)
f.close()
validate = module.params.get('validate', None)
valid = not validate
if validate:
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(validate % tmpfile)
valid = rc == 0
if rc != 0:
module.fail_json(msg='failed to validate: '
'rc:%s error:%s' % (rc, err))
if valid:
module.atomic_move(tmpfile, dest)
def check_file_attrs(module, changed, message):
file_args = module.load_file_common_arguments(module.params)
if module.set_file_attributes_if_different(file_args, False):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def main():
module = AnsibleModule(
argument_spec=dict(
dest=dict(required=True, aliases=['name', 'destfile'], type='path'),
state=dict(default='present', choices=['absent', 'present']),
marker=dict(default='# {mark} ANSIBLE MANAGED BLOCK', type='str'),
block=dict(default='', type='str', aliases=['content']),
insertafter=dict(default=None),
insertbefore=dict(default=None),
create=dict(default=False, type='bool'),
backup=dict(default=False, type='bool'),
validate=dict(default=None, type='str'),
),
mutually_exclusive=[['insertbefore', 'insertafter']],
add_file_common_args=True,
supports_check_mode=True
)
params = module.params
dest = params['dest']
if module.boolean(params.get('follow', None)):
dest = os.path.realpath(dest)
if os.path.isdir(dest):
module.fail_json(rc=256,
msg='Destination %s is a directory !' % dest)
if not os.path.exists(dest):
if not module.boolean(params['create']):
module.fail_json(rc=257,
msg='Destination %s does not exist !' % dest)
original = None
lines = []
else:
f = open(dest, 'rb')
original = f.read()
f.close()
lines = original.splitlines()
insertbefore = params['insertbefore']
insertafter = params['insertafter']
block = params['block']
marker = params['marker']
present = params['state'] == 'present'
if insertbefore is None and insertafter is None:
insertafter = 'EOF'
if insertafter not in (None, 'EOF'):
insertre = re.compile(insertafter)
elif insertbefore not in (None, 'BOF'):
insertre = re.compile(insertbefore)
else:
insertre = None
marker0 = re.sub(r'{mark}', 'BEGIN', marker)
marker1 = re.sub(r'{mark}', 'END', marker)
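    # marker0/marker1 are the literal BEGIN/END lines that delimit the
    # managed block, e.g. '# BEGIN ANSIBLE MANAGED BLOCK'.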
if present and block:
        # Escape sequences like '\n' need to be handled in Ansible 1.x;
        # substituting the block as the replacement string makes re expand
        # those escapes.
if module.ansible_version.startswith('1.'):
block = re.sub('', block, '')
blocklines = [marker0] + block.splitlines() + [marker1]
else:
blocklines = []
n0 = n1 = None
for i, line in enumerate(lines):
if line.startswith(marker0):
n0 = i
if line.startswith(marker1):
n1 = i
if None in (n0, n1):
n0 = None
if insertre is not None:
for i, line in enumerate(lines):
if insertre.search(line):
n0 = i
if n0 is None:
n0 = len(lines)
elif insertafter is not None:
n0 += 1
elif insertbefore is not None:
n0 = 0 # insertbefore=BOF
else:
n0 = len(lines) # insertafter=EOF
elif n0 < n1:
lines[n0:n1+1] = []
else:
lines[n1:n0+1] = []
n0 = n1
lines[n0:n0] = blocklines
if lines:
result = '\n'.join(lines)
if original and original.endswith('\n'):
result += '\n'
else:
result = ''
if original == result:
msg = ''
changed = False
elif original is None:
msg = 'File created'
changed = True
elif not blocklines:
msg = 'Block removed'
changed = True
else:
msg = 'Block inserted'
changed = True
if changed and not module.check_mode:
if module.boolean(params['backup']) and os.path.exists(dest):
module.backup_local(dest)
write_changes(module, result, dest)
msg, changed = check_file_attrs(module, changed, msg)
module.exit_json(changed=changed, msg=msg)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.splitter import *
if __name__ == '__main__':
main()
| gpl-3.0 | -5,859,433,097,651,414,000 | 30.089457 | 124 | 0.60929 | false |
LightberryEu/plugin.program.hyperion.configurator | resources/lib/Led.py | 1 | 6398 | import json
class Led:
def __init__(self):
self.x_start = 0
self.x_end = 0
self.y_start = 0
self.y_end = 0
self.position = 0
self.color = bytearray([0,0,0])
def setCoordinates(self, in_x_start, in_x_end, in_y_start,in_y_end):
self.x_start = in_x_start
self.x_end = in_x_end
self.y_start = in_y_start
self.y_end = in_y_end
def printRaw(self):
print "led [" , self.position , "] - (" , self.x_start , " , " , self.x_end , ") , Y(", self.y_start , " , " , self.y_end , ")"
def hscan_to_dict(self):
"""returns dictionary for horizontal coordinates"""
return dict(minimum = round(self.x_start,4), maximum = round(self.x_end,4))
def vscan_to_dict(self):
"""returns dictionary for vertical coordinates"""
return dict(minimum = round(self.y_start,4), maximum = round(self.y_end,4))
def to_json_string(self):
return json.dumps(self.vscan_to_dict(),sort_keys=False,indent=4, separators=(',', ': '))
def set_color(self,red, green, blue):
if red > 255 or red < 0 or green > 255 or green < 0 or blue > 255 or blue < 0 :
raise "Incorrect values (must be between <0,255>"
else:
self.color = bytearray([red,green,blue])
class LedChain:
def __init__(self, no_of_leds):
self.number_of_leds = no_of_leds
self.leds = []
self.offset = 0
def generate_layout(self, nol_horizontal, nol_vertical, horizontal_depth, vertical_depth):
"""key method in this class - it calculates coordinates of picture scan area. As a result
there are Led instances created with coordinates assigned"""
self.nol_horizontal = nol_horizontal
self.nol_vertical = nol_vertical
area_top_coordinate = 0.0
area_bottom_coordinate = 0.0
area_left_coordinate = 0.0
area_right_coordinate = 0.0
self.vertical_segment = 1.0/nol_vertical
self.horizontal_segment = 1.0/nol_horizontal
for i in range(0,self.number_of_leds):
if i < nol_vertical: # right
vertical_position = i+1
area_top_coordinate = (1 -(self.vertical_segment * vertical_position))
area_left_coordinate = 1 - horizontal_depth;
area_right_coordinate = 1
area_bottom_coordinate = area_top_coordinate + self.vertical_segment
elif i >= nol_vertical and i < nol_vertical + nol_horizontal : #top
horizontal_position = nol_horizontal - (i - nol_vertical) - 1
area_left_coordinate = horizontal_position * self.horizontal_segment
area_top_coordinate = 0.0
area_bottom_coordinate = vertical_depth
area_right_coordinate = area_left_coordinate + self.horizontal_segment
elif i >= nol_vertical + nol_horizontal and i < nol_vertical + nol_horizontal + nol_vertical: #left
vertical_position = i - nol_vertical - nol_horizontal
area_top_coordinate = (0 +(self.vertical_segment * vertical_position))
area_left_coordinate = 0.0
area_right_coordinate = horizontal_depth
area_bottom_coordinate = area_top_coordinate + self.vertical_segment
else: # bottom
horizontal_position = i - nol_vertical - nol_horizontal - nol_vertical
area_top_coordinate = (1 - vertical_depth)
area_left_coordinate = horizontal_position * self.horizontal_segment
area_right_coordinate = area_left_coordinate + self.horizontal_segment
area_bottom_coordinate = 1
led = Led()
led.setCoordinates(area_left_coordinate,area_right_coordinate, area_top_coordinate,area_bottom_coordinate)
led.position = i
self.leds.append(led)
self.original_chain = list(self.leds) #make a copy of initial setup
def set_overlap(self,overlap_pct):
"""Use this method if you want to have leds scanning areas overlaping each other
(to loose some details of the effect, to make it smoother"""
self.horizontal_overlap = (overlap_pct / 100.0) * self.horizontal_segment
self.vertical_overlap = (overlap_pct / 100.0) * self.vertical_segment
for led in self.leds:
led.x_start = max(led.x_start - self.horizontal_overlap,0)
led.x_end = min(led.x_end + self.horizontal_overlap,1)
led.y_start = max(led.y_start - self.vertical_overlap,0)
led.y_end = min(led.y_end + self.vertical_overlap,1)
def reverse_direction(self):
"""Reverses leds direction from counterclockwise to clockwise"""
self.leds.reverse()
def left_bottom_start(self):
"""Moves the start of leds from right to left bottom corner for clockwise direction"""
def set_offset(self, offset_value):
"""it can be useful when your leds do not start at right/bottom corner, but, lets say, from the middle of bottom edge"""
if offset_value > 0:
for i in range(offset_value):
self.leds.append(self.leds.pop(0))
elif offset_value < 0:
for i in range((-1)*offset_value):
self.leds.insert(0,self.leds.pop(self.number_of_leds-1))
def print_me(self):
for i in range(0,len(self.leds)):
self.leds[i].printRaw()
def to_string(self):
for i in range(0,len(self.leds)):
self.leds[i].printRaw()
def leds_to_json_string(self):
"""Returns json string representing the leds"""
leds_array = []
for i in range(0,len(self.leds)):
leds_array.append(dict(index = i,hscan = self.leds[i].hscan_to_dict(), vscan = self.leds[i].vscan_to_dict()))
return json.dumps(leds_array,sort_keys=False,indent=4, separators=(',', ': '))
def get_list_of_leds_dicts(self):
"""Returns array of dicts leds"""
leds_array = []
for i in range(0,len(self.leds)):
leds_array.append(dict(index = i,hscan = self.leds[i].hscan_to_dict(), vscan = self.leds[i].vscan_to_dict()))
return leds_array
def leds_to_bytearray(self):
"""Converts leds' colors into bytearray. Useful if you want to send the data to the leds via hyperion interface"""
data = bytearray()
for led in self.leds:
data += led.color
return data
def set_single_color(self,red,green,blue):
"""Sets single color for all leds"""
if red > 255 or red < 0 or green > 255 or green < 0 or blue > 255 or blue < 0 :
raise "Incorrect values (must be between <0,255>)"
else:
for i in range(len(self.leds)):
self.leds[i].set_color(red, green, blue)
def add_extra_leds(self, no_of_extra_leds):
for i in range(0,no_of_extra_leds):
led=Led()
led.position=self.number_of_leds+i
self.leds.append(led)
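# Quick sketch (values are illustrative):
#   chain = LedChain(50)
#   chain.generate_layout(20, 5, 0.05, 0.08)  # 20 top/bottom, 5 per side
#   chain.set_single_color(255, 0, 0)
#   data = chain.leds_to_bytearray()          # 150 bytes, 3 per LED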
#test
#t = LedChain(50)
#t.set_offset(5)
#t.to_string()
| gpl-2.0 | -684,550,313,806,654,500 | 37.012195 | 129 | 0.662863 | false |
rittersport3/CityWatchers | boilerplate/external/httplib2/__init__.py | 83 | 64159 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer ([email protected])",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.7.2"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import base64
import os
import copy
import calendar
import time
import random
import errno
# remove deprecated warning in python2.6
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
socks = None
# Build the appropriate socket wrapper for ssl
try:
import ssl # python 2.6
ssl_SSLError = ssl.SSLError
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module
# doesn't expose the necessary knobs. So we need to go with the default
# of SSLv23.
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if not disable_validation:
raise CertificateValidationUnsupported(
"SSL certificate validation is not supported without "
"the ssl module installed. To avoid this error, install "
"the ssl module, or explicity disable validation.")
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if sys.version_info >= (2,3):
from iri2uri import iri2uri
else:
def iri2uri(uri):
return uri
def has_timeout(timeout): # python 2.6
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# Python 2.3 support
if sys.version_info < (2,4):
def sorted(seq):
seq.sort()
return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise httplib.ResponseNotReady()
return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses content negotiation (conneg) and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
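# Illustrative example (a sketch, not part of the original module): headers
# named in the Connection header are treated as hop-by-hop in addition to the
# defaults, so only true end-to-end headers survive.
#
# >>> _get_end2end_headers({'connection': 'x-foo', 'x-foo': 'a',
# ...                       'content-type': 'text/plain'})
# ['content-type']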
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
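# Illustrative example (a sketch, not part of the original module):
#
# >>> parse_uri("http://example.com/path/page?x=1#top")
# ('http', 'example.com', '/path/page', 'x=1', 'top')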
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
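# Illustrative example (a sketch, not part of the original module): urlnorm
# lowercases the scheme and authority and defaults an empty path to "/".
#
# >>> urlnorm("HTTP://Example.COM")
# ('http', 'example.com', '/', 'http://example.com/')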
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme.match(filename):
if isinstance(filename,str):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,unicode):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest()
filename = re_url_scheme.sub("", filename)
filename = re_slash.sub(",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    return dict([(key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
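# Illustrative example (a sketch, not part of the original module; dict key
# order may vary). Directives with arguments keep their value; bare
# directives map to 1.
#
# >>> _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
# {'max-age': '3600', 'no-cache': 1}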
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn it on; useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headers.has_key(headername):
try:
authenticate = headers[headername].strip()
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
while authenticate:
# Break off the scheme at the beginning of the line
if headername == 'authentication-info':
(auth_scheme, the_rest) = ('digest', authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
# being careful not to roll into the next scheme
match = www_auth.search(the_rest)
auth_params = {}
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
except ValueError:
raise MalformedHeader("WWW-Authenticate")
return retval
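# Illustrative example (a sketch, not part of the original module): one dict
# per auth scheme, keyed by the lowercased scheme name.
#
# >>> _parse_www_authenticate({'www-authenticate': 'Basic realm="example"'})
# {'basic': {'realm': 'example'}}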
def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers.
We don't handle the following:
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
    Note that this algorithm is simpler than you might think
because we are operating as a private (non-shared) cache.
This lets us ignore 's-maxage'. We can also ignore
'proxy-invalidate' since we aren't a proxy.
    As a design decision we will never return a stale
    document as fresh, hence the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
since we operate as if every server has sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
no-cache
only-if-cached
max-age
min-fresh
"""
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT"
if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache'
elif cc.has_key('no-cache'):
retval = "TRANSPARENT"
elif cc_response.has_key('no-cache'):
retval = "STALE"
elif cc.has_key('only-if-cached'):
retval = "FRESH"
elif response_headers.has_key('date'):
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
now = time.time()
current_age = max(0, now - date)
if cc_response.has_key('max-age'):
try:
freshness_lifetime = int(cc_response['max-age'])
except ValueError:
freshness_lifetime = 0
elif response_headers.has_key('expires'):
expires = email.Utils.parsedate_tz(response_headers['expires'])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
if cc.has_key('max-age'):
try:
freshness_lifetime = int(cc['max-age'])
except ValueError:
freshness_lifetime = 0
if cc.has_key('min-fresh'):
try:
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
return retval
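# Illustrative example (a sketch, not part of the original module): a
# response dated "now" with max-age=3600 is FRESH, while a request sending
# Cache-Control: no-cache forces TRANSPARENT.
#
# now = email.Utils.formatdate(time.time())
# _entry_disposition({'date': now, 'cache-control': 'max-age=3600'}, {})  # "FRESH"
# _entry_disposition({'date': now}, {'cache-control': 'no-cache'})        # "TRANSPARENT"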
def _decompressContent(response, new_content):
content = new_content
try:
encoding = response.get('content-encoding', None)
if encoding in ['gzip', 'deflate']:
if encoding == 'gzip':
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
if encoding == 'deflate':
content = zlib.decompress(content)
response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way that won't interfere.
response['-content-encoding'] = response['content-encoding']
del response['content-encoding']
except IOError:
content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if cc.has_key('no-store') or cc_response.has_key('no-store'):
cache.delete(cachekey)
else:
info = email.Message.Message()
for key, value in response_headers.iteritems():
if key not in ['status','content-encoding','transfer-encoding']:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
vary = response_headers.get('vary', None)
if vary:
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
try:
info[key] = request_headers[header]
except KeyError:
pass
status = response_headers.status
if status == 304:
status = 200
status_header = 'status: %d\r\n' % status
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = "".join([status_header, header_str, content])
cache.set(cachekey, text)
def _cnonce():
dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
# For credentials we need two things, first
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
self.credentials = credentials
self.http = http
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return request_uri[len(self.path):].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return (host == self.host) and path.startswith(self.path)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
        Authorization header. Override this in subclasses."""
pass
def response(self, response, content):
"""Gives us a chance to update with new nonces
or such returned from the last authorized response.
        Override this in subclasses if necessary.
        Return True if the request is to be retried, for
example Digest may return stale=true.
"""
return False
class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['digest']
qop = self.challenge.get('qop', 'auth')
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
if self.challenge['qop'] is None:
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
"""Modify the request headers"""
H = lambda x: _md5(x).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
self.challenge['cnonce'] = cnonce or _cnonce()
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
'%08x' % self.challenge['nc'],
self.challenge['cnonce'],
self.challenge['qop'], H(A2)
))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
self.challenge['cnonce'],
)
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
def response(self, response, content):
if not response.has_key('authentication-info'):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if updated_challenge.has_key('nextnonce'):
self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
__author__ = "Thomas Broyer ([email protected])"
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['hmacdigest']
# TODO: self.challenge['domain']
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
self.challenge['reason'] = 'unauthorized'
self.challenge['salt'] = self.challenge.get('salt', '')
if not self.challenge.get('snonce'):
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
if self.challenge['algorithm'] == 'HMAC-MD5':
self.hashmod = _md5
else:
self.hashmod = _sha
if self.challenge['pw-algorithm'] == 'MD5':
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
":", self.challenge['realm']
])
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
"""Modify the request headers"""
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
cnonce = _cnonce()
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
request_uri,
created,
request_digest,
keylist,
)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
if challenge.get('reason') in ['integrity', 'stale']:
return True
return False
class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
self.credentials[0],
password_digest,
cnonce,
iso_now)
class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib import urlencode
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
if service == 'xapi' and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
#elif request_uri.find("spreadsheets") > 0:
# service = "wise"
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
lines = content.split('\n')
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
self.Auth = d['Auth']
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
"basic": BasicAuthentication,
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
"googlelogin": GoogleLoginAuthentication
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
retval = None
cacheFullPath = os.path.join(self.cache, self.safe(key))
try:
f = file(cacheFullPath, "rb")
retval = f.read()
f.close()
except IOError:
pass
return retval
def set(self, key, value):
cacheFullPath = os.path.join(self.cache, self.safe(key))
f = file(cacheFullPath, "wb")
f.write(value)
f.close()
def delete(self, key):
cacheFullPath = os.path.join(self.cache, self.safe(key))
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
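# Illustrative usage (a sketch, not part of the original module; creates a
# real directory on disk):
#
# cache = FileCache('.demo_cache')
# cache.set('http://example.com/', 'status: 200\r\n\r\nbody')
# assert cache.get('http://example.com/') == 'status: 200\r\n\r\nbody'
# cache.delete('http://example.com/')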
class Credentials(object):
def __init__(self):
self.credentials = []
def add(self, name, password, domain=""):
self.credentials.append((domain.lower(), name, password))
def clear(self):
self.credentials = []
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
yield (name, password)
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
pass
class ProxyInfo(object):
"""Collect information required to use a proxy."""
def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None):
"""The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000)
"""
self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns,
self.proxy_user, self.proxy_pass)
def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None)
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
"""
HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
self.proxy_info = proxy_info
def connect(self):
"""Connect to the host and port specified in __init__."""
# Mostly verbatim from httplib.py.
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
'Proxy support missing but proxy use was requested!')
msg = "getaddrinfo returns an empty list"
for res in socket.getaddrinfo(self.host, self.port, 0,
socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
if self.proxy_info and self.proxy_info.isgood():
self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(*self.proxy_info.astuple())
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Different from httplib: support timeouts.
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
# End of difference from httplib.
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
self.sock.connect(sa)
except socket.error, msg:
if self.debuglevel > 0:
print 'connect fail:', (self.host, self.port)
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
cert_file=cert_file, strict=strict)
self.timeout = timeout
self.proxy_info = proxy_info
if ca_certs is None:
ca_certs = CA_CERTS
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
# under the following license:
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def _GetValidHostsForCert(self, cert):
"""Returns a list of valid host globs for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
Returns:
list: A list of valid host globs.
"""
if 'subjectAltName' in cert:
return [x[1] for x in cert['subjectAltName']
if x[0].lower() == 'dns']
else:
return [x[0][1] for x in cert['subject']
if x[0][0].lower() == 'commonname']
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
hostname: The hostname to test.
Returns:
bool: Whether or not the hostname is valid for this certificate.
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
host_re = host.replace('.', '\.').replace('*', '[^.]*')
if re.search('^%s$' % (host_re,), hostname, re.I):
return True
return False
def connect(self):
"Connect to a host on a given (SSL) port."
msg = "getaddrinfo returns an empty list"
for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
self.host, self.port, 0, socket.SOCK_STREAM):
try:
if self.proxy_info and self.proxy_info.isgood():
sock = socks.socksocket(family, socktype, proto)
sock.setproxy(*self.proxy_info.astuple())
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if has_timeout(self.timeout):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock =_ssl_wrap_socket(
sock, self.key_file, self.cert_file,
self.disable_ssl_certificate_validation, self.ca_certs)
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert()
                    hostname = self.host.split(':', 1)[0]
if not self._ValidateCertificateHostname(cert, hostname):
raise CertificateHostnameMismatch(
'Server presented certificate that does not match '
'host %s: %s' % (hostname, cert), hostname, cert)
except ssl_SSLError, e:
if sock:
sock.close()
if self.sock:
self.sock.close()
self.sock = None
# Unfortunately the ssl module doesn't seem to provide any way
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
if e.errno == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e)
else:
raise
except (socket.timeout, socket.gaierror):
raise
except socket.error, msg:
if self.debuglevel > 0:
print 'connect fail:', (self.host, self.port)
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
from google.appengine.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine.
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import InvalidURLError
from google.appengine.api.urlfetch import DownloadError
from google.appengine.api.urlfetch import ResponseTooLargeError
from google.appengine.api.urlfetch import SSLCertificateError
class ResponseDict(dict):
"""Is a dictionary that also has a read() method, so
        that it can pass itself off as an httplib.HTTPResponse()."""
def read(self):
pass
class AppEngineHttpConnection(object):
"""Emulates an httplib.HTTPConnection object, but actually uses the Google
App Engine urlfetch library. This allows the timeout to be properly used on
Google App Engine, and avoids using httplib, which on Google App Engine is
just another wrapper around urlfetch.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_certificate_validation=False):
self.host = host
self.port = port
self.timeout = timeout
if key_file or cert_file or proxy_info or ca_certs:
raise NotSupportedOnThisPlatform()
self.response = None
self.scheme = 'http'
self.validate_certificate = not disable_certificate_validation
self.sock = True
def request(self, method, url, body, headers):
# Calculate the absolute URI, which fetch requires
netloc = self.host
if self.port:
netloc = '%s:%s' % (self.host, self.port)
absolute_uri = '%s://%s%s' % (self.scheme, netloc, url)
try:
response = fetch(absolute_uri, payload=body, method=method,
headers=headers, allow_truncated=False, follow_redirects=False,
deadline=self.timeout,
validate_certificate=self.validate_certificate)
self.response = ResponseDict(response.headers)
self.response['status'] = str(response.status_code)
self.response.status = response.status_code
setattr(self.response, 'read', lambda : response.content)
# Make sure the exceptions raised match the exceptions expected.
except InvalidURLError:
raise socket.gaierror('')
except (DownloadError, ResponseTooLargeError, SSLCertificateError):
raise httplib.HTTPException()
def getresponse(self):
if self.response:
return self.response
else:
raise httplib.HTTPException()
def set_debuglevel(self, level):
pass
def connect(self):
pass
def close(self):
pass
class AppEngineHttpsConnection(AppEngineHttpConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs."""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None):
AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file,
strict, timeout, proxy_info)
self.scheme = 'https'
    # Update the connection classes to use the Google App Engine-specific ones.
SCHEME_TO_CONNECTION = {
'http': AppEngineHttpConnection,
'https': AppEngineHttpsConnection
}
except ImportError:
pass
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression,
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
def __init__(self, cache=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
"""
The value of proxy_info is a ProxyInfo instance.
If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
All timeouts are in seconds. If None is passed for timeout
then Python's default timeout for sockets will be used. See
for example the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
ca_certs is the path of a file containing root CA certificates for SSL
server certificate validation. By default, a CA cert file bundled with
httplib2 is used.
If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed.
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
# where cached responses are held.
if cache and isinstance(cache, basestring):
self.cache = FileCache(cache)
else:
self.cache = cache
# Name/password
self.credentials = Credentials()
# Key/cert
self.certificates = KeyCerts()
# authorization objects
self.authorizations = []
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
# If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme):
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
any time a request requires authentication."""
self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
"""Add a key and cert that will be used
any time a request requires authentication."""
self.certificates.add(key, cert, domain)
def clear_credentials(self):
"""Remove all the names and passwords
that are used for authentication"""
self.credentials.clear()
self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
for i in range(2):
try:
if conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
raise
except socket.gaierror:
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except ssl_SSLError:
conn.close()
raise
except socket.error, e:
err = 0
if hasattr(e, 'args'):
err = getattr(e, 'args')[0]
else:
err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
raise
except httplib.HTTPException:
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
if conn.sock is None:
if i == 0:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
if i == 0:
conn.close()
conn.connect()
continue
try:
response = conn.getresponse()
except (socket.error, httplib.HTTPException):
if i == 0:
conn.close()
conn.connect()
continue
else:
raise
else:
content = ""
if method == "HEAD":
response.close()
else:
content = response.read()
response = Response(response)
if method != "HEAD":
content = _decompressContent(response, content)
break
return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
response._stale_digest = 1
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
if not response.has_key('location') and response.status != 300:
raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if response.has_key('location'):
location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
response['location'] = urlparse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if headers.has_key('if-none-match'):
del headers['if-none-match']
if headers.has_key('if-modified-since'):
del headers['if-modified-since']
if response.has_key('location'):
location = response['location']
old_response = copy.deepcopy(response)
if not old_response.has_key('content-location'):
old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
response.previous = old_response
else:
                    raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
def _normalize_headers(self, headers):
return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin
with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a string
object.
Any extra headers that are to be sent with the request should be provided in the
'headers' dictionary.
        The maximum number of redirects to follow before raising an
        exception is 'redirections'. The default is 5.
The return value is a tuple of (response, content), the first
        being an instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
if headers is None:
headers = {}
else:
headers = self._normalize_headers(headers)
if not headers.has_key('user-agent'):
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
domain_port = authority.split(":")[0:2]
if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
scheme = 'https'
authority = domain_port[0]
conn_key = scheme+":"+authority
if conn_key in self.connections:
conn = self.connections[conn_key]
else:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if issubclass(connection_type, HTTPSConnectionWithTimeout):
if certs:
conn = self.connections[conn_key] = connection_type(
authority, key_file=certs[0][0],
cert_file=certs[0][1], timeout=self.timeout,
proxy_info=self.proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=self.proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=self.proxy_info)
conn.set_debuglevel(debuglevel)
if 'range' not in headers and 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip, deflate'
info = email.Message.Message()
cached_value = None
if self.cache:
cachekey = defrag_uri
cached_value = self.cache.get(cachekey)
if cached_value:
# info = email.message_from_string(cached_value)
#
# Need to replace the line above with the kludge below
# to fix the non-existent bug not fixed in this
# bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
try:
info, content = cached_value.split('\r\n\r\n', 1)
feedparser = email.FeedParser.FeedParser()
feedparser.feed(info)
info = feedparser.close()
feedparser._parse = None
except IndexError:
self.cache.delete(cachekey)
cachekey = None
cached_value = None
else:
cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag']
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
if method in ['GET', 'HEAD'] and 'vary' in info:
vary = info['vary']
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if info.has_key('-x-permanent-redirect-url'):
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
                        raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
(response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
response.previous = Response(info)
response.previous.fromcache = True
else:
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
#
                # There seem to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
entry_disposition = _entry_disposition(info, headers)
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
content = ""
response = Response(info)
if cached_value:
response.fromcache = True
return (response, content)
if entry_disposition == "STALE":
if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag']
if info.has_key('last-modified') and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT":
pass
(response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
# Take all headers that are in response
# and overwrite their values in info.
# unless they are hop-by-hop, or are listed in the connection header.
for key in _get_end2end_headers(response):
info[key] = response[key]
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
content = new_content
else:
cc = _parse_cache_control(headers)
if cc.has_key('only-if-cached'):
info['status'] = '504'
response = Response(info)
content = ""
else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception, e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response
content = e.content
response.status = 500
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = "Request Timeout"
response = Response( {
"content-type": "text/plain",
"status": "408",
"content-length": len(content)
})
response.reason = "Request Timeout"
else:
content = str(e)
response = Response( {
"content-type": "text/plain",
"status": "400",
"content-length": len(content)
})
response.reason = "Bad Request"
else:
raise
return (response, content)
class Response(dict):
"""An object more like email.Message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
"""HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
version = 11
"Status code returned by server. "
status = 200
"""Reason phrase returned by server."""
reason = "Ok"
previous = None
def __init__(self, info):
# info is either an email.Message or
# an httplib.HTTPResponse object.
if isinstance(info, httplib.HTTPResponse):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.Message.Message):
for key, value in info.items():
self[key] = value
self.status = int(self['status'])
else:
for key, value in info.iteritems():
self[key] = value
self.status = int(self.get('status', self.status))
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError, name
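# Illustrative end-to-end usage of this module (a sketch, not part of the
# original source; the URL is a placeholder):
#
# h = Http('.cache')                     # cache responses in a local directory
# resp, content = h.request("http://example.org/", "GET")
# print resp.status, resp.fromcache
# h.add_credentials('name', 'password')  # tried automatically on a 401
# resp, content = h.request("http://example.org/protected")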
| lgpl-3.0 | -1,662,337,992,665,416,700 | 41.265481 | 235 | 0.587478 | false |
liaorubei/depot_tools | third_party/gsutil/gslib/addlhelp/prod.py | 51 | 8580 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HelpProvider
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
_detailed_help_text = ("""
<B>OVERVIEW</B>
If you use gsutil in large production tasks (such as uploading or
downloading many GB of data each night), there are a number of things
you can do to help ensure success. Specifically, this section discusses
how to script large production tasks around gsutil's resumable transfer
mechanism.
<B>BACKGROUND ON RESUMABLE TRANSFERS</B>
First, it's helpful to understand gsutil's resumable transfer mechanism,
and how your script needs to be implemented around this mechanism to work
reliably. gsutil uses the resumable transfer support in the boto library
when you attempt to upload or download a file larger than a configurable
threshold (by default, this threshold is 1MB). When a transfer fails
partway through (e.g., because of an intermittent network problem),
boto uses a randomized binary exponential backoff-and-retry strategy:
wait a random period between [0..1] seconds and retry; if that fails,
wait a random period between [0..2] seconds and retry; and if that
fails, wait a random period between [0..4] seconds, and so on, up to a
configurable number of times (the default is 6 times). Thus, the retry
actually spans a randomized period up to 1+2+4+8+16+32=63 seconds.
If the transfer fails each of these attempts with no intervening
progress, gsutil gives up on the transfer, but keeps a "tracker" file
for it in a configurable location (the default location is ~/.gsutil/,
in a file named by a combination of the SHA1 hash of the name of the
bucket and object being transferred and the last 16 characters of the
file name). When transfers fail in this fashion, you can rerun gsutil
at some later time (e.g., after the networking problem has been
resolved), and the resumable transfer picks up where it left off.
<B>SCRIPTING DATA TRANSFER TASKS</B>
To script large production data transfer tasks around this mechanism,
you can implement a script that runs periodically, determines which file
transfers have not yet succeeded, and runs gsutil to copy them. Below,
we offer a number of suggestions about how this type of scripting should
be implemented:
1. When resumable transfers fail without any progress 6 times in a row
over the course of up to 63 seconds, it probably won't work to simply
retry the transfer immediately. A more successful strategy would be to
have a cron job that runs every 30 minutes, determines which transfers
need to be run, and runs them. If the network experiences intermittent
problems, the script picks up where it left off and will eventually
succeed (once the network problem has been resolved).
2. If your business depends on timely data transfer, you should consider
implementing some network monitoring. For example, you can implement
a task that attempts a small download every few minutes and raises an
alert if the attempt fails for several attempts in a row (or more or less
frequently depending on your requirements), so that your IT staff can
investigate problems promptly. As usual with monitoring implementations,
you should experiment with the alerting thresholds, to avoid false
positive alerts that cause your staff to begin ignoring the alerts.
3. There are a variety of ways you can determine what files remain to be
transferred. We recommend that you avoid attempting to get a complete
listing of a bucket containing many objects (e.g., tens of thousands
or more). One strategy is to structure your object names in a way that
represents your transfer process, and use gsutil prefix wildcards to
request partial bucket listings. For example, if your periodic process
involves downloading the current day's objects, you could name objects
using a year-month-day-object-ID format and then find today's objects by
using a command like gsutil ls gs://bucket/2011-09-27-*. Note that it
is more efficient to have a non-wildcard prefix like this than to use
something like gsutil ls gs://bucket/*-2011-09-27. The latter command
actually requests a complete bucket listing and then filters in gsutil,
while the former asks Google Storage to return the subset of objects
whose names start with everything up to the *.
For data uploads, another technique would be to move local files from a "to
be processed" area to a "done" area as your script successfully copies files
to the cloud. You can do this in parallel batches by using a command like:
gsutil -m cp -R to_upload/subdir_$i gs://bucket/subdir_$i
where i is a shell loop variable. Make sure to check the shell $status
variable is 0 after each gsutil cp command, to detect if some of the copies
failed, and rerun the affected copies.
With this strategy, the file system keeps track of all remaining work to
be done.
4. If you have really large numbers of objects in a single bucket
(say hundreds of thousands or more), you should consider tracking your
objects in a database instead of using bucket listings to enumerate
the objects. For example this database could track the state of your
downloads, so you can determine what objects need to be downloaded by
your periodic download script by querying the database locally instead
of performing a bucket listing.
5. Make sure you don't delete partially downloaded files after a transfer
fails: gsutil picks up where it left off (and performs an MD5 check of
the final downloaded content to ensure data integrity), so deleting
partially transferred files will cause you to lose progress and make
more wasteful use of your network. You should also make sure whatever
process is waiting to consume the downloaded data doesn't get pointed
at the partially downloaded files. One way to do this is to download
into a staging directory and then move successfully downloaded files to
a directory where consumer processes will read them.
6. If you have a fast network connection, you can speed up the transfer of
large numbers of files by using the gsutil -m (multi-threading /
multi-processing) option. Be aware, however, that gsutil doesn't attempt to
keep track of which files were downloaded successfully in cases where some
files failed to download. For example, if you use multi-threaded transfers
to download 100 files and 3 failed to download, it is up to your scripting
process to determine which transfers didn't succeed, and retry them. A
periodic check-and-run approach like outlined earlier would handle this case.
If you use parallel transfers (gsutil -m) you might want to experiment with
the number of threads being used (via the parallel_thread_count setting
in the .boto config file). By default, gsutil uses 24 threads. Depending
on your network speed, available memory, CPU load, and other conditions,
this may or may not be optimal. Try experimenting with higher or lower
numbers of threads, to find the best number of threads for your environment.
""")
class CommandOptions(HelpProvider):
"""Additional help about using gsutil for production tasks."""
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'prod',
# List of help name aliases.
HELP_NAME_ALIASES : ['production', 'resumable', 'resumable upload',
'resumable transfer', 'resumable download',
'scripts', 'scripting'],
# Type of help:
HELP_TYPE : HelpType.ADDITIONAL_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Scripting production data transfers with gsutil',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
| bsd-3-clause | 5,415,862,029,930,760,000 | 52.625 | 79 | 0.765501 | false |
caioserra/apiAdwords | examples/adspygoogle/dfp/v201308/get_all_orders.py | 2 | 1619 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all orders. To create orders, run create_orders.py."""
__author__ = '[email protected] (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
order_service = client.GetService('OrderService', version='v201308')
# Get orders by statement.
orders = DfpUtils.GetAllEntitiesByStatementWithService(order_service)
# Display results.
for order in orders:
print ('Order with id \'%s\', name \'%s\', and advertiser id \'%s\' was '
'found.' % (order['id'], order['name'], order['advertiserId']))
print
print 'Number of results found: %s' % len(orders)
| apache-2.0 | 1,195,587,120,636,674,800 | 33.446809 | 80 | 0.718962 | false |
slgobinath/SafeEyes | safeeyes/plugins/audiblealert/plugin.py | 1 | 2359 | #!/usr/bin/env python
# Safe Eyes is a utility to remind you to take break frequently
# to protect your eyes from eye strain.
# Copyright (C) 2017 Gobinath
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
The Audible Alert plugin plays a sound after each break to notify the user that the break has ended.
"""
import logging
from safeeyes import utility
context = None
pre_break_alert = False
post_break_alert = False
def play_sound(resource_name):
"""Play the audio resource.
Arguments:
resource_name {string} -- name of the wav file resource
"""
logging.info('Playing audible alert %s', resource_name)
try:
# Open the sound file
path = utility.get_resource_path(resource_name)
if path is None:
return
utility.execute_command('aplay', ['-q', path])
except BaseException:
logging.error('Failed to play audible alert %s', resource_name)
def init(ctx, safeeyes_config, plugin_config):
"""
Initialize the plugin.
"""
global context
global pre_break_alert
global post_break_alert
logging.debug('Initialize Audible Alert plugin')
context = ctx
pre_break_alert = plugin_config['pre_break_alert']
post_break_alert = plugin_config['post_break_alert']
def on_pre_break(break_obj):
"""Play the pre_break sound if the option is enabled.
Arguments:
break_obj {safeeyes.model.Break} -- the break object
"""
if pre_break_alert:
play_sound('on_pre_break.wav')
def on_stop_break():
"""
After the break, play the alert sound
"""
# Do not play if the break is skipped or postponed
if context['skipped'] or context['postponed'] or not post_break_alert:
return
play_sound('on_stop_break.wav')
| gpl-3.0 | -8,379,223,866,682,541,000 | 28.860759 | 95 | 0.688851 | false |
maxisi/gwsumm | gwsumm/html/utils.py | 1 | 1587 | # -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2013)
#
# This file is part of GWSumm.
#
# GWSumm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWSumm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWSumm. If not, see <http://www.gnu.org/licenses/>.
"""Utilties for HTML generation
"""
import subprocess
from gwsumm import version
__author__ = 'Duncan Macleod <[email protected]>'
__version__ = version.version
def highlight_syntax(filepath, format_):
"""Return an HTML-formatted copy of the file with syntax highlighting
"""
highlight = ['highlight', '--out-format', 'html', '--syntax', format_,
'--inline-css', '--fragment', '--input', filepath]
try:
process = subprocess.Popen(highlight, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except OSError:
with open(filepath, 'r') as fobj:
return fobj.read()
else:
out, err = process.communicate()
if process.returncode != 0:
with open(filepath, 'r') as fobj:
return fobj.read()
else:
return out | gpl-3.0 | 7,918,633,420,352,642,000 | 32.787234 | 74 | 0.653434 | false |
chrismeyersfsu/ansible | lib/ansible/modules/network/dellos6/dellos6_command.py | 32 | 6660 | #!/usr/bin/python
#
# (c) 2015 Peter Sprygada, <[email protected]>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: dellos6_command
version_added: "2.2"
short_description: Run commands on remote devices running Dell OS6
description:
- Sends arbitrary commands to a Dell OS6 node and returns the results
read from the device. The M(dellos6_command) module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(dellos6_config) to configure Dell OS6 devices.
extends_documentation_fragment: dellos6
options:
commands:
description:
- List of commands to send to the remote dellos6 device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
        module does not return until the condition is satisfied or
        the number of I(retries) has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of I(retries), the task fails.
See examples.
required: false
default: null
retries:
description:
      - Specifies the number of times a command should be retried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
vars:
cli:
host: "{{ inventory_hostname }}"
username: admin
password: admin
transport: cli
tasks:
- name: run show version on remote devices
dellos6_command:
commands: show version
provider "{{ cli }}"
- name: run show version and check to see if output contains Dell
dellos6_command:
commands: show version
wait_for: result[0] contains Dell
provider "{{ cli }}"
- name: run multiple commands on remote nodes
dellos6_command:
commands:
- show version
- show interfaces
provider "{{ cli }}"
- name: run multiple commands and evaluate the output
dellos6_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Dell
- result[1] contains Access
provider "{{ cli }}"
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
"""
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcli import CommandRunner, FailedConditionsError
from ansible.module_utils.network import NetworkModule, NetworkError
import ansible.module_utils.dellos6
def to_lines(stdout):
for item in stdout:
if isinstance(item, basestring):
item = str(item).split('\n')
yield item
def main():
spec = dict(
commands=dict(type='list', required=True),
wait_for=dict(type='list'),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
module = NetworkModule(argument_spec=spec,
connect_on_load=False,
supports_check_mode=True)
commands = module.params['commands']
conditionals = module.params['wait_for'] or list()
warnings = list()
runner = CommandRunner(module)
for cmd in commands:
if module.check_mode and not cmd.startswith('show'):
warnings.append('only show commands are supported when using '
'check mode, not executing `%s`' % cmd)
else:
if cmd.startswith('conf'):
module.fail_json(msg='dellos6_command does not support running '
'config mode commands. Please use '
'dellos6_config instead')
runner.add_command(cmd)
for item in conditionals:
runner.add_conditional(item)
runner.retries = module.params['retries']
runner.interval = module.params['interval']
try:
runner.run()
except FailedConditionsError:
exc = get_exception()
module.fail_json(msg=str(exc), failed_conditions=exc.failed_conditions)
except NetworkError:
exc = get_exception()
module.fail_json(msg=str(exc))
result = dict(changed=False)
result['stdout'] = list()
for cmd in commands:
try:
output = runner.get_command(cmd)
except ValueError:
output = 'command not executed due to check_mode, see warnings'
result['stdout'].append(output)
result['warnings'] = warnings
result['stdout_lines'] = list(to_lines(result['stdout']))
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,904,057,700,746,909,000 | 29.833333 | 83 | 0.653754 | false |
openwisp/django-netjsonconfig | django_netjsonconfig/tests/test_views.py | 1 | 1185 | from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
User = get_user_model()
class TestViews(TestCase):
"""
tests for django_netjsonconfig.views
"""
def setUp(self):
User.objects.create_superuser(
username='admin', password='tester', email='[email protected]'
)
def test_schema_403(self):
response = self.client.get(reverse('admin:schema'))
self.assertEqual(response.status_code, 403)
self.assertIn('error', response.json())
def test_schema_200(self):
self.client.force_login(User.objects.get(pk=1))
response = self.client.get(reverse('admin:schema'))
self.assertEqual(response.status_code, 200)
self.assertIn('netjsonconfig.OpenWrt', response.json())
def test_schema_hostname_hidden(self):
from ..views import available_schemas
for key, schema in available_schemas.items():
if 'general' not in schema['properties']:
continue
if 'hostname' in schema['properties']['general']['properties']:
self.fail('hostname property must be hidden')
| gpl-3.0 | 561,502,092,984,339,260 | 31.916667 | 75 | 0.643882 | false |
ergs/rickshaw | tests/test_choose_niches.py | 1 | 1257 | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 23 10:49:39 2016
@author: adam
"""
import pytest
from rickshaw.simspec import SimSpec, def_niches, def_commodities
from rickshaw.generate import random_niches, choose_commodity, choose_commodities
@pytest.mark.parametrize("i", range(100))
def test_random_niches(i):
spec = SimSpec()
obs = random_niches(spec, 10)
assert isinstance(obs, list)
assert "mine" in obs
assert None not in obs
assert len(obs) <= 10
for niche in obs:
assert niche in def_niches()
@pytest.mark.parametrize("i", range(100))
def test_random_niches_startkey(i):
spec = SimSpec()
obs = random_niches(spec, 10, "enrichment")
assert isinstance(obs, list)
assert "enrichment" in obs
assert "mine" not in obs
assert None not in obs
assert len(obs) <= 10
for niche in obs:
assert niche in def_niches()
"""
def test_has_commodity(): #up_hierarchy function minimizes the error of a commodity not existing
obs_niches = random_niches(10)
for keyfrom, keyto in zip(obs_niches[:-1], obs_niches[1:]):
commod = choose_commodity(keyfrom, keyto, ())
assert commod in COMMODITIES.values() #check to see if the chosen commodity exists as a value
"""
| bsd-3-clause | -3,161,080,474,548,095,000 | 30.425 | 101 | 0.675418 | false |
Barmaley-exe/scikit-learn | examples/tree/plot_iris.py | 271 | 2186 | """
================================================================
Plot the decision surface of a decision tree on the iris dataset
================================================================
Plot the decision surface of a decision tree trained on pairs
of features of the iris dataset.
See :ref:`decision tree <tree>` for more information on the estimator.
For each pair of iris features, the decision tree learns decision
boundaries made of combinations of simple thresholding rules inferred from
the training samples.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier
# Parameters
n_classes = 3
plot_colors = "bry"
plot_step = 0.02
# Load data
iris = load_iris()
for pairidx, pair in enumerate([[0, 1], [0, 2], [0, 3],
[1, 2], [1, 3], [2, 3]]):
# We only take the two corresponding features
X = iris.data[:, pair]
y = iris.target
# Shuffle
idx = np.arange(X.shape[0])
np.random.seed(13)
np.random.shuffle(idx)
X = X[idx]
y = y[idx]
# Standardize
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X - mean) / std
# Train
clf = DecisionTreeClassifier().fit(X, y)
# Plot the decision boundary
plt.subplot(2, 3, pairidx + 1)
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),
np.arange(y_min, y_max, plot_step))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
cs = plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
plt.xlabel(iris.feature_names[pair[0]])
plt.ylabel(iris.feature_names[pair[1]])
plt.axis("tight")
# Plot the training points
for i, color in zip(range(n_classes), plot_colors):
idx = np.where(y == i)
plt.scatter(X[idx, 0], X[idx, 1], c=color, label=iris.target_names[i],
cmap=plt.cm.Paired)
plt.axis("tight")
plt.suptitle("Decision surface of a decision tree using paired features")
plt.legend()
plt.show()
| bsd-3-clause | -5,256,231,851,708,946,000 | 27.025641 | 78 | 0.58097 | false |
amondot/QGIS | python/plugins/processing/algs/lidar/lastools/lasgrid.py | 9 | 3382 | # -*- coding: utf-8 -*-
"""
***************************************************************************
lasgrid.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
---------------------
Date : September 2013
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from LAStoolsUtils import LAStoolsUtils
from LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
class lasgrid(LAStoolsAlgorithm):
ATTRIBUTE = "ATTRIBUTE"
METHOD = "METHOD"
ATTRIBUTES = ["elevation", "intensity", "rgb", "classification"]
METHODS = ["lowest", "highest", "average", "stddev"]
USE_TILE_BB = "USE_TILE_BB"
def defineCharacteristics(self):
self.name = "lasgrid"
self.group = "LAStools"
self.addParametersVerboseGUI()
self.addParametersPointInputGUI()
self.addParametersFilter1ReturnClassFlagsGUI()
self.addParametersStepGUI()
self.addParameter(ParameterSelection(lasgrid.ATTRIBUTE,
self.tr("Attribute"), lasgrid.ATTRIBUTES, 0))
self.addParameter(ParameterSelection(lasgrid.METHOD,
self.tr("Method"), lasgrid.METHODS, 0))
self.addParameter(ParameterBoolean(lasgrid.USE_TILE_BB,
self.tr("use tile bounding box (after tiling with buffer)"), False))
self.addParametersRasterOutputGUI()
self.addParametersAdditionalGUI()
def processAlgorithm(self, progress):
commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasgrid")]
self.addParametersVerboseCommands(commands)
self.addParametersPointInputCommands(commands)
self.addParametersFilter1ReturnClassFlagsCommands(commands)
self.addParametersStepCommands(commands)
attribute = self.getParameterValue(lasgrid.ATTRIBUTE)
if attribute != 0:
commands.append("-" + lasgrid.ATTRIBUTES[attribute])
method = self.getParameterValue(lasgrid.METHOD)
if method != 0:
commands.append("-" + lasgrid.METHODS[method])
if (self.getParameterValue(lasgrid.USE_TILE_BB)):
commands.append("-use_tile_bb")
self.addParametersRasterOutputCommands(commands)
self.addParametersAdditionalCommands(commands)
LAStoolsUtils.runLAStools(commands, progress)
| gpl-2.0 | -4,707,366,093,024,928,000 | 42.358974 | 81 | 0.578356 | false |
qubitcoin/QubitCoin-p2pool | p2pool/web.py | 6 | 25283 | from __future__ import division
import errno
import json
import os
import sys
import time
import traceback
from twisted.internet import defer
from twisted.python import log
from twisted.web import resource, static
import p2pool
from bitcoin import data as bitcoin_data
from . import data as p2pool_data
from util import deferral, deferred_resource, graph, math, memory, pack, variable
def _atomic_read(filename):
try:
with open(filename, 'rb') as f:
return f.read()
except IOError, e:
if e.errno != errno.ENOENT:
raise
try:
with open(filename + '.new', 'rb') as f:
return f.read()
except IOError, e:
if e.errno != errno.ENOENT:
raise
return None
def _atomic_write(filename, data):
with open(filename + '.new', 'wb') as f:
f.write(data)
f.flush()
try:
os.fsync(f.fileno())
except:
pass
try:
os.rename(filename + '.new', filename)
except: # XXX windows can't overwrite
os.remove(filename)
os.rename(filename + '.new', filename)
def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
node = wb.node
start_time = time.time()
web_root = resource.Resource()
def get_users():
height, last = node.tracker.get_height_and_last(node.best_share_var.value)
weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
res = {}
for script in sorted(weights, key=lambda s: weights[s]):
res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
return res
def get_current_scaled_txouts(scale, trunc=0):
txouts = node.get_current_txouts()
total = sum(txouts.itervalues())
results = dict((script, value*scale//total) for script, value in txouts.iteritems())
if trunc > 0:
total_random = 0
random_set = set()
for s in sorted(results, key=results.__getitem__):
if results[s] >= trunc:
break
total_random += results[s]
random_set.add(s)
if total_random:
winner = math.weighted_choice((script, results[script]) for script in random_set)
for script in random_set:
del results[script]
results[winner] = total_random
if sum(results.itervalues()) < int(scale):
results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
return results
def get_patron_sendmany(total=None, trunc='0.01'):
if total is None:
return 'need total argument. go to patron_sendmany/<TOTAL>'
        ### Neisklar: this should be defined in the network settings, not hardcoded...
total = int(float(total)*1e5)
trunc = int(float(trunc)*1e5)
return json.dumps(dict(
(bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e5)
for script, value in get_current_scaled_txouts(total, trunc).iteritems()
if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
))
def get_global_stats():
# averaged over last hour
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
return dict(
pool_nonstale_hash_rate=nonstale_hash_rate,
pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
pool_stale_prop=stale_prop,
min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
)
def get_local_stats():
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
my_share_count = my_unstale_count + my_orphan_count + my_doa_count
my_stale_count = my_orphan_count + my_doa_count
my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
if share.hash in wb.my_share_hashes)
actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
share_att_s = my_work / actual_time
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
(stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
return dict(
my_hash_rates_in_last_hour=dict(
note="DEPRECATED",
nonstale=share_att_s,
rewarded=share_att_s/(1 - global_stale_prop),
actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
),
my_share_counts_in_last_hour=dict(
shares=my_share_count,
unstale_shares=my_unstale_count,
stale_shares=my_stale_count,
orphan_stale_shares=my_orphan_count,
doa_stale_shares=my_doa_count,
),
my_stale_proportions_in_last_hour=dict(
stale=my_stale_prop,
orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
),
miner_hash_rates=miner_hash_rates,
miner_dead_hash_rates=miner_dead_hash_rates,
efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
peers=dict(
incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
),
shares=dict(
total=shares,
orphan=stale_orphan_shares,
dead=stale_doa_shares,
),
uptime=time.time() - start_time,
attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
block_value=node.bitcoind_work.value['subsidy']*1e-5,
warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
donation_proportion=wb.donation_percentage/100,
)
class WebInterface(deferred_resource.DeferredResource):
def __init__(self, func, mime_type='application/json', args=()):
deferred_resource.DeferredResource.__init__(self)
self.func, self.mime_type, self.args = func, mime_type, args
def getChild(self, child, request):
return WebInterface(self.func, self.mime_type, self.args + (child,))
@defer.inlineCallbacks
def render_GET(self, request):
request.setHeader('Content-Type', self.mime_type)
request.setHeader('Access-Control-Allow-Origin', '*')
res = yield self.func(*self.args)
defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
def decent_height():
return min(node.tracker.get_height(node.best_share_var.value), 720)
web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
web_root.putChild('users', WebInterface(get_users))
web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e5) for script, value in node.get_current_txouts().iteritems())))
web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
web_root.putChild('global_stats', WebInterface(get_global_stats))
web_root.putChild('local_stats', WebInterface(get_local_stats))
web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
dict([(a, (yield b)) for a, b in
[(
'%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
))()
) for peer in list(node.p2p_node.peers.itervalues())]
])
))))
web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
ts=s.timestamp,
hash='%064x' % s.header_hash,
number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
share='%064x' % s.hash,
) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
new_root = resource.Resource()
web_root.putChild('web', new_root)
stat_log = []
if os.path.exists(os.path.join(datadir_path, 'stats')):
try:
with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
stat_log = json.loads(f.read())
except:
log.err(None, 'Error loading stats:')
def update_stat_log():
while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
stat_log.pop(0)
lookbehind = 3600//node.net.SHARE_PERIOD
if node.tracker.get_height(node.best_share_var.value) < lookbehind:
return None
global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
(stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
stat_log.append(dict(
time=time.time(),
pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
pool_stale_prop=global_stale_prop,
local_hash_rates=miner_hash_rates,
local_dead_hash_rates=miner_dead_hash_rates,
shares=shares,
stale_shares=stale_orphan_shares + stale_doa_shares,
stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-5,
peers=dict(
incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
),
attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
block_value=node.bitcoind_work.value['subsidy']*1e-5,
))
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
x = deferral.RobustLoopingCall(update_stat_log)
x.start(5*60)
stop_event.watch(x.stop)
new_root.putChild('log', WebInterface(lambda: stat_log))
def get_share(share_hash_str):
if int(share_hash_str, 16) not in node.tracker.items:
return None
share = node.tracker.items[int(share_hash_str, 16)]
return dict(
parent='%064x' % share.previous_hash,
children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
type_name=type(share).__name__,
local=dict(
verified=share.hash in node.tracker.verified.items,
time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
peer_first_received_from=share.peer_addr,
),
share_data=dict(
timestamp=share.timestamp,
target=share.target,
max_target=share.max_target,
payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
donation=share.share_data['donation']/65535,
stale_info=share.share_data['stale_info'],
nonce=share.share_data['nonce'],
desired_version=share.share_data['desired_version'],
absheight=share.absheight,
abswork=share.abswork,
),
block=dict(
hash='%064x' % share.header_hash,
header=dict(
version=share.header['version'],
previous_block='%064x' % share.header['previous_block'],
merkle_root='%064x' % share.header['merkle_root'],
timestamp=share.header['timestamp'],
target=share.header['bits'].target,
nonce=share.header['nonce'],
),
gentx=dict(
hash='%064x' % share.gentx_hash,
coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
value=share.share_data['subsidy']*1e-5,
),
txn_count=len(list(share.iter_transaction_hash_refs())),
),
)
new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
def get_share_data(share_hash_str):
if int(share_hash_str, 16) not in node.tracker.items:
return ''
share = node.tracker.items[int(share_hash_str, 16)]
return p2pool_data.share_type.pack(share.as_share1a())
new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
new_root.putChild('currency_info', WebInterface(lambda: dict(
symbol=node.net.PARENT.SYMBOL,
block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
)))
new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
hd_path = os.path.join(datadir_path, 'graph_db')
hd_data = _atomic_read(hd_path)
hd_obj = {}
if hd_data is not None:
try:
hd_obj = json.loads(hd_data)
except Exception:
log.err(None, 'Error reading graph database:')
dataview_descriptions = {
'last_hour': graph.DataViewDescription(150, 60*60),
'last_day': graph.DataViewDescription(300, 60*60*24),
'last_week': graph.DataViewDescription(300, 60*60*24*7),
'last_month': graph.DataViewDescription(300, 60*60*24*30),
'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
}
def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
if not obj:
last_bin_end = 0
bins = dv_desc.bin_count*[{}]
else:
pool_rates = obj['pool_rates'][dv_name]
desired_versions = obj['desired_versions'][dv_name]
def get_total_pool_rate(t):
n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
if n < 0 or n >= dv_desc.bin_count:
return None
total = sum(x[0] for x in pool_rates['bins'][n].values())
count = math.mean(x[1] for x in pool_rates['bins'][n].values())
if count == 0:
return None
return total/count
last_bin_end = desired_versions['last_bin_end']
bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
hd = graph.HistoryDatabase.from_obj({
'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True),
'current_payout': graph.DataStreamDescription(dataview_descriptions),
'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True),
'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True, default_func=build_desired_rates),
'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
'memory_usage': graph.DataStreamDescription(dataview_descriptions),
}, hd_obj)
x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
x.start(100)
stop_event.watch(x.stop)
@wb.pseudoshare_received.watch
def _(work, dead, user):
t = time.time()
hd.datastreams['local_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
if user is not None:
hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
if dead:
hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
@wb.share_received.watch
def _(work, dead):
t = time.time()
hd.datastreams['local_share_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
@node.p2p_node.traffic_happened.watch
def _(name, bytes):
hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
def add_point():
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
t = time.time()
pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
pool_total = sum(pool_rates.itervalues())
hd.datastreams['pool_rates'].add_datum(t, pool_rates)
current_txouts = node.get_current_txouts()
hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-5)
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-5) for user in miner_hash_rates if user in current_txouts_by_address))
hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
vs_total = sum(vs.itervalues())
hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
try:
hd.datastreams['memory_usage'].add_datum(t, memory.resident())
except:
if p2pool.DEBUG:
traceback.print_exc()
x = deferral.RobustLoopingCall(add_point)
x.start(5)
stop_event.watch(x.stop)
@node.bitcoind_work.changed.watch
def _(new_work):
hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
return web_root
| gpl-3.0 | 1,099,758,611,428,907,500 | 55.059867 | 260 | 0.631136 | false |
lardissone/mcfinance | mcfinance/settings.py | 1 | 2971 | """
Django settings for mcfinance project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
from mongoengine import connect
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'g=4l@71714t3+6aqc(1f3oswm1q9+y-&wtxvllla^zkk*#_e6-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 3rd party apps
'bootstrap3',
'mongoengine.django.mongo_auth',
# MC Finance apps
'mcfinance.core.CoreConfig',
'mcfinance.transactions.TransactionsConfig',
# Others
'django_extensions',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mcfinance.urls'
WSGI_APPLICATION = 'mcfinance.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.dummy'
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'mcfinance.core.contextprocessors.mcvars',
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# Mongoengine
MONGODB_HOST = 'localhost'
MONGODB_PORT = 27017
MONGODB_DB = 'mcfinance'
# Auth
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
# Session
SESSION_ENGINE = 'mongoengine.django.sessions'
SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
| mit | -2,923,366,692,722,441,700 | 23.553719 | 71 | 0.733423 | false |
egaxegax/django-dbcartajs | django/contrib/flatpages/templatetags/flatpages.py | 227 | 3631 | from django import template
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import get_current_site
register = template.Library()
class FlatpageNode(template.Node):
def __init__(self, context_name, starts_with=None, user=None):
self.context_name = context_name
if starts_with:
self.starts_with = template.Variable(starts_with)
else:
self.starts_with = None
if user:
self.user = template.Variable(user)
else:
self.user = None
def render(self, context):
if 'request' in context:
site_pk = get_current_site(context['request']).pk
else:
site_pk = settings.SITE_ID
flatpages = FlatPage.objects.filter(sites__id=site_pk)
# If a prefix was specified, add a filter
if self.starts_with:
flatpages = flatpages.filter(
url__startswith=self.starts_with.resolve(context))
# If the provided user is not authenticated, or no user
# was provided, filter the list to only public flatpages.
if self.user:
user = self.user.resolve(context)
if not user.is_authenticated():
flatpages = flatpages.filter(registration_required=False)
else:
flatpages = flatpages.filter(registration_required=False)
context[self.context_name] = flatpages
return ''
@register.tag
def get_flatpages(parser, token):
"""
Retrieves all flatpage objects available for the current site and
visible to the specific user (or visible to all users if no user is
specified). Populates the template context with them in a variable
whose name is defined by the ``as`` clause.
An optional ``for`` clause can be used to control the user whose
permissions are to be used in determining which flatpages are visible.
An optional argument, ``starts_with``, can be applied to limit the
returned flatpages to those beginning with a particular base URL.
This argument can be passed as a variable or a string, as it resolves
from the template context.
Syntax::
{% get_flatpages ['url_starts_with'] [for user] as context_name %}
Example usage::
{% get_flatpages as flatpages %}
{% get_flatpages for someuser as flatpages %}
{% get_flatpages '/about/' as about_pages %}
{% get_flatpages prefix as about_pages %}
{% get_flatpages '/about/' for someuser as about_pages %}
"""
bits = token.split_contents()
syntax_message = ("%(tag_name)s expects a syntax of %(tag_name)s "
"['url_starts_with'] [for user] as context_name" %
dict(tag_name=bits[0]))
    # Must have 3-6 bits in the tag
if len(bits) >= 3 and len(bits) <= 6:
# If there's an even number of bits, there's no prefix
if len(bits) % 2 == 0:
prefix = bits[1]
else:
prefix = None
# The very last bit must be the context name
if bits[-2] != 'as':
raise template.TemplateSyntaxError(syntax_message)
context_name = bits[-1]
# If there are 5 or 6 bits, there is a user defined
if len(bits) >= 5:
if bits[-4] != 'for':
raise template.TemplateSyntaxError(syntax_message)
user = bits[-3]
else:
user = None
return FlatpageNode(context_name, starts_with=prefix, user=user)
else:
raise template.TemplateSyntaxError(syntax_message)
| gpl-2.0 | -4,807,135,271,542,821,000 | 34.598039 | 74 | 0.616635 | false |
ABaldwinHunter/django-clone | django/contrib/gis/utils/ogrinspect.py | 391 | 9090 | """
This module is for inspecting OGR data sources and generating either
models for GeoDjango and/or mapping dictionaries for use with the
`LayerMapping` utility.
"""
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.field import (
OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString,
OFTTime,
)
from django.utils import six
from django.utils.six.moves import zip
def mapping(data_source, geom_name='geom', layer_key=0, multi_geom=False):
"""
Given a DataSource, generates a dictionary that may be used
for invoking the LayerMapping utility.
Keyword Arguments:
`geom_name` => The name of the geometry field to use for the model.
`layer_key` => The key for specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
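    Example usage (illustrative; the shapefile path is a placeholder and
    the returned keys depend on the layer's fields):
    >>> from django.contrib.gis.utils import mapping
    >>> mapping('/path/to/world_borders.shp', geom_name='mpoly', multi_geom=True)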
"""
if isinstance(data_source, six.string_types):
# Instantiating the DataSource from the string.
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError('Data source parameter must be a string or a DataSource object.')
# Creating the dictionary.
_mapping = {}
# Generating the field name for each field in the layer.
for field in data_source[layer_key].fields:
mfield = field.lower()
if mfield[-1:] == '_':
mfield += 'field'
_mapping[mfield] = field
gtype = data_source[layer_key].geom_type
if multi_geom:
gtype.to_multi()
_mapping[geom_name] = str(gtype).upper()
return _mapping
def ogrinspect(*args, **kwargs):
"""
Given a data source (either a string or a DataSource object) and a string
model name this function will generate a GeoDjango model.
Usage:
>>> from django.contrib.gis.utils import ogrinspect
    >>> ogrinspect('/path/to/shapefile.shp', 'NewModel')
    ...will print the model definition to stdout
    or put this in a Python script and redirect the output to a new
    models file like:
$ python generate_model.py > myapp/models.py
# generate_model.py
from django.contrib.gis.utils import ogrinspect
shp_file = 'data/mapping_hacks/world_borders.shp'
model_name = 'WorldBorders'
print(ogrinspect(shp_file, model_name, multi_geom=True, srid=4326,
geom_name='shapes', blank=True))
Required Arguments
`datasource` => string or DataSource object to file pointer
`model name` => string of name of new model class to create
Optional Keyword Arguments
`geom_name` => For specifying the model name for the Geometry Field.
Otherwise will default to `geom`
`layer_key` => The key for specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`srid` => The SRID to use for the Geometry Field. If it can be determined,
the SRID of the datasource is used.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
`name_field` => String - specifies a field name to return for the
`__unicode__`/`__str__` function (which will be generated if specified).
`imports` => Boolean (default: True) - set to False to omit the
`from django.contrib.gis.db import models` code from the
autogenerated models thus avoiding duplicated imports when building
more than one model by batching ogrinspect()
`decimal` => Boolean or sequence (default: False). When set to True
all generated model fields corresponding to the `OFTReal` type will
be `DecimalField` instead of `FloatField`. A sequence of specific
field names to generate as `DecimalField` may also be used.
`blank` => Boolean or sequence (default: False). When set to True all
generated model fields will have `blank=True`. If the user wants to
give specific fields to have blank, then a list/tuple of OGR field
names may be used.
`null` => Boolean (default: False) - When set to True all generated
     model fields will have `null=True`. If the user wants to give
     specific fields to have null, then a list/tuple of OGR field
names may be used.
Note: This routine calls the _ogrinspect() helper to do the heavy lifting.
"""
return '\n'.join(s for s in _ogrinspect(*args, **kwargs))
def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=None,
multi_geom=False, name_field=None, imports=True,
decimal=False, blank=False, null=False):
"""
Helper routine for `ogrinspect` that generates GeoDjango models corresponding
to the given data source. See the `ogrinspect` docstring for more details.
"""
# Getting the DataSource
if isinstance(data_source, six.string_types):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError('Data source parameter must be a string or a DataSource object.')
# Getting the layer corresponding to the layer key and getting
# a string listing of all OGR fields in the Layer.
layer = data_source[layer_key]
ogr_fields = layer.fields
# Creating lists from the `null`, `blank`, and `decimal`
# keyword arguments.
def process_kwarg(kwarg):
if isinstance(kwarg, (list, tuple)):
return [s.lower() for s in kwarg]
elif kwarg:
return [s.lower() for s in ogr_fields]
else:
return []
null_fields = process_kwarg(null)
blank_fields = process_kwarg(blank)
decimal_fields = process_kwarg(decimal)
# Gets the `null` and `blank` keywords for the given field name.
def get_kwargs_str(field_name):
kwlist = []
if field_name.lower() in null_fields:
kwlist.append('null=True')
if field_name.lower() in blank_fields:
kwlist.append('blank=True')
if kwlist:
return ', ' + ', '.join(kwlist)
else:
return ''
# For those wishing to disable the imports.
if imports:
yield '# This is an auto-generated Django model module created by ogrinspect.'
yield 'from django.contrib.gis.db import models'
yield ''
yield 'class %s(models.Model):' % model_name
for field_name, width, precision, field_type in zip(
ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types):
# The model field name.
mfield = field_name.lower()
if mfield[-1:] == '_':
mfield += 'field'
# Getting the keyword args string.
kwargs_str = get_kwargs_str(field_name)
if field_type is OFTReal:
# By default OFTReals are mapped to `FloatField`, however, they
# may also be mapped to `DecimalField` if specified in the
# `decimal` keyword.
if field_name.lower() in decimal_fields:
yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (
mfield, width, precision, kwargs_str
)
else:
yield ' %s = models.FloatField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTInteger:
yield ' %s = models.IntegerField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTInteger64:
yield ' %s = models.BigIntegerField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTString:
yield ' %s = models.CharField(max_length=%s%s)' % (mfield, width, kwargs_str)
elif field_type is OFTDate:
yield ' %s = models.DateField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTDateTime:
yield ' %s = models.DateTimeField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTTime:
yield ' %s = models.TimeField(%s)' % (mfield, kwargs_str[2:])
else:
raise TypeError('Unknown field type %s in %s' % (field_type, mfield))
# TODO: Autodetection of multigeometry types (see #7218).
gtype = layer.geom_type
if multi_geom:
gtype.to_multi()
geom_field = gtype.django
# Setting up the SRID keyword string.
if srid is None:
if layer.srs is None:
srid_str = 'srid=-1'
else:
srid = layer.srs.srid
if srid is None:
srid_str = 'srid=-1'
elif srid == 4326:
# WGS84 is already the default.
srid_str = ''
else:
srid_str = 'srid=%s' % srid
else:
srid_str = 'srid=%s' % srid
yield ' %s = models.%s(%s)' % (geom_name, geom_field, srid_str)
if name_field:
yield ''
yield ' def __%s__(self): return self.%s' % (
'str' if six.PY3 else 'unicode', name_field)
| bsd-3-clause | 4,898,510,895,667,357,000 | 37.033473 | 92 | 0.624532 | false |
thumbimigwe/echorizr | lib/python2.7/site-packages/django/utils/log.py | 84 | 5261 | from __future__ import unicode_literals
import logging
import logging.config # needed when logging_config doesn't start with logging.config
import sys
import warnings
from copy import copy
from django.conf import settings
from django.core import mail
from django.core.mail import get_connection
from django.utils.deprecation import RemovedInNextVersionWarning
from django.utils.module_loading import import_string
from django.views.debug import ExceptionReporter
# Default logging for Django. This sends an email to the site admins on every
# HTTP 500 error. Depending on DEBUG, all other log records are either sent to
# the console (DEBUG=True) or discarded (DEBUG=False) by means of the
# require_debug_true filter.
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
},
'py.warnings': {
'handlers': ['console'],
},
}
}
def configure_logging(logging_config, logging_settings):
if not sys.warnoptions:
# Route warnings through python logging
logging.captureWarnings(True)
# RemovedInNextVersionWarning is a subclass of DeprecationWarning which
# is hidden by default, hence we force the "default" behavior
warnings.simplefilter("default", RemovedInNextVersionWarning)
if logging_config:
# First find the logging configuration function ...
logging_config_func = import_string(logging_config)
logging.config.dictConfig(DEFAULT_LOGGING)
# ... then invoke it with the logging settings
if logging_settings:
logging_config_func(logging_settings)
class AdminEmailHandler(logging.Handler):
"""An exception log handler that emails log entries to site admins.
If the request is passed as the first argument to the log record,
request data will be provided in the email report.
"""
def __init__(self, include_html=False, email_backend=None):
logging.Handler.__init__(self)
self.include_html = include_html
self.email_backend = email_backend
def emit(self, record):
try:
request = record.request
subject = '%s (%s IP): %s' % (
record.levelname,
('internal' if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
else 'EXTERNAL'),
record.getMessage()
)
except Exception:
subject = '%s: %s' % (
record.levelname,
record.getMessage()
)
request = None
subject = self.format_subject(subject)
# Since we add a nicely formatted traceback on our own, create a copy
# of the log record without the exception data.
no_exc_record = copy(record)
no_exc_record.exc_info = None
no_exc_record.exc_text = None
if record.exc_info:
exc_info = record.exc_info
else:
exc_info = (None, record.getMessage(), None)
reporter = ExceptionReporter(request, is_email=True, *exc_info)
message = "%s\n\n%s" % (self.format(no_exc_record), reporter.get_traceback_text())
html_message = reporter.get_traceback_html() if self.include_html else None
self.send_mail(subject, message, fail_silently=True, html_message=html_message)
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_admins(subject, message, *args, connection=self.connection(), **kwargs)
def connection(self):
return get_connection(backend=self.email_backend, fail_silently=True)
def format_subject(self, subject):
"""
Escape CR and LF characters, and limit length.
RFC 2822's hard limit is 998 characters per line. So, minus "Subject: "
the actual subject must be no longer than 989 characters.
"""
formatted_subject = subject.replace('\n', '\\n').replace('\r', '\\r')
return formatted_subject[:989]
class CallbackFilter(logging.Filter):
"""
A logging filter that checks the return value of a given callable (which
takes the record-to-be-logged as its only parameter) to decide whether to
log a record.
"""
def __init__(self, callback):
self.callback = callback
def filter(self, record):
if self.callback(record):
return 1
return 0
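# Illustrative sketch: CallbackFilter lets an arbitrary predicate decide what
# gets logged. Here a hypothetical filter passes records at ERROR and above,
# or any record carrying a made-up "force" attribute.
def _example_callback_filter():
    flt = CallbackFilter(lambda record: record.levelno >= logging.ERROR
                         or getattr(record, 'force', False))
    handler = logging.StreamHandler()
    handler.addFilter(flt)
    return handler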
class RequireDebugFalse(logging.Filter):
def filter(self, record):
return not settings.DEBUG
class RequireDebugTrue(logging.Filter):
def filter(self, record):
return settings.DEBUG
# ---- end of record (license: MIT) ----
# ==== repo: javiergarridomellado/Empresa_django | file: devcodela/lib/python2.7/site-packages/django/contrib/messages/api.py ====
from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
__all__ = (
'add_message', 'get_messages',
'get_level', 'set_level',
'debug', 'info', 'success', 'warning', 'error',
)
class MessageFailure(Exception):
pass
def add_message(request, level, message, extra_tags='', fail_silently=False):
"""
Attempts to add a message to the request using the 'messages' app.
"""
if hasattr(request, '_messages'):
return request._messages.add(level, message, extra_tags)
if not fail_silently:
raise MessageFailure('You cannot add messages without installing '
'django.contrib.messages.middleware.MessageMiddleware')
def get_messages(request):
"""
Returns the message storage on the request if it exists, otherwise returns
an empty list.
"""
if hasattr(request, '_messages'):
return request._messages
else:
return []
def get_level(request):
"""
Returns the minimum level of messages to be recorded.
The default level is the ``MESSAGE_LEVEL`` setting. If this is not found,
the ``INFO`` level is used.
"""
if hasattr(request, '_messages'):
storage = request._messages
else:
storage = default_storage(request)
return storage.level
def set_level(request, level):
"""
Sets the minimum level of messages to be recorded, returning ``True`` if
the level was recorded successfully.
If set to ``None``, the default level will be used (see the ``get_level``
method).
"""
if not hasattr(request, '_messages'):
return False
request._messages.level = level
return True
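# Illustrative sketch (hypothetical view function, not part of this module):
# a view typically stores a message with one of the level shortcuts defined
# below and later iterates the storage that get_messages() returns.
def _example_view(request):
    add_message(request, constants.SUCCESS, 'Profile saved.',
                fail_silently=True)
    return list(get_messages(request))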
def debug(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``DEBUG`` level.
"""
add_message(request, constants.DEBUG, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def info(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``INFO`` level.
"""
add_message(request, constants.INFO, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def success(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``SUCCESS`` level.
"""
add_message(request, constants.SUCCESS, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def warning(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``WARNING`` level.
"""
add_message(request, constants.WARNING, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def error(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``ERROR`` level.
"""
add_message(request, constants.ERROR, message, extra_tags=extra_tags,
fail_silently=fail_silently)
# ---- end of record (license: GPL-2.0) ----
# ==== repo: romankagan/DDBWorkbench | file: python/lib/Lib/datetime.py ====
"""Concrete date/time and related types -- prototype implemented in Python.
See http://www.zope.org/Members/fdrake/DateTimeWiki/FrontPage
See also http://dir.yahoo.com/Reference/calendars/
For a primer on DST, including many current DST rules, see
http://webexhibits.org/daylightsaving/
For more about DST than you ever wanted to know, see
ftp://elsie.nci.nih.gov/pub/
Sources for time zone and DST data: http://www.twinsun.com/tz/tz-link.htm
This was originally copied from the sandbox of the CPython CVS repository.
Thanks to Tim Peters for suggesting using it.
"""
import time as _time
import math as _math
MINYEAR = 1
MAXYEAR = 9999
# Utility functions, adapted from Python's Demo/classes/Dates.py, which
# also assumes the current Gregorian calendar indefinitely extended in
# both directions. Difference: Dates.py calls January 1 of year 0 day
# number 1. The code here calls January 1 of year 1 day number 1. This is
# to match the definition of the "proleptic Gregorian" calendar in Dershowitz
# and Reingold's "Calendrical Calculations", where it's the base calendar
# for all computations. See the book for algorithms for converting between
# proleptic Gregorian ordinals and many other calendar systems.
_DAYS_IN_MONTH = [None, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
_DAYS_BEFORE_MONTH = [None]
dbm = 0
for dim in _DAYS_IN_MONTH[1:]:
_DAYS_BEFORE_MONTH.append(dbm)
dbm += dim
del dbm, dim
def _is_leap(year):
"year -> 1 if leap year, else 0."
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def _days_in_year(year):
"year -> number of days in year (366 if a leap year, else 365)."
return 365 + _is_leap(year)
def _days_before_year(year):
"year -> number of days before January 1st of year."
y = year - 1
return y*365 + y//4 - y//100 + y//400
def _days_in_month(year, month):
"year, month -> number of days in that month in that year."
assert 1 <= month <= 12, month
if month == 2 and _is_leap(year):
return 29
return _DAYS_IN_MONTH[month]
def _days_before_month(year, month):
"year, month -> number of days in year preceeding first day of month."
if not 1 <= month <= 12:
raise ValueError('month must be in 1..12', month)
return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year))
def _ymd2ord(year, month, day):
"year, month, day -> ordinal, considering 01-Jan-0001 as day 1."
if not 1 <= month <= 12:
raise ValueError('month must be in 1..12', month)
dim = _days_in_month(year, month)
if not 1 <= day <= dim:
raise ValueError('day must be in 1..%d' % dim, day)
return (_days_before_year(year) +
_days_before_month(year, month) +
day)
_DI400Y = _days_before_year(401) # number of days in 400 years
_DI100Y = _days_before_year(101) # " " " " 100 "
_DI4Y = _days_before_year(5) # " " " " 4 "
# A 4-year cycle has an extra leap day over what we'd get from pasting
# together 4 single years.
assert _DI4Y == 4 * 365 + 1
# Similarly, a 400-year cycle has an extra leap day over what we'd get from
# pasting together 4 100-year cycles.
assert _DI400Y == 4 * _DI100Y + 1
# OTOH, a 100-year cycle has one fewer leap day than we'd get from
# pasting together 25 4-year cycles.
assert _DI100Y == 25 * _DI4Y - 1
def _ord2ymd(n):
"ordinal -> (year, month, day), considering 01-Jan-0001 as day 1."
# n is a 1-based index, starting at 1-Jan-1. The pattern of leap years
# repeats exactly every 400 years. The basic strategy is to find the
# closest 400-year boundary at or before n, then work with the offset
# from that boundary to n. Life is much clearer if we subtract 1 from
# n first -- then the values of n at 400-year boundaries are exactly
# those divisible by _DI400Y:
#
# D M Y n n-1
# -- --- ---- ---------- ----------------
# 31 Dec -400 -_DI400Y -_DI400Y -1
# 1 Jan -399 -_DI400Y +1 -_DI400Y 400-year boundary
# ...
# 30 Dec 000 -1 -2
# 31 Dec 000 0 -1
# 1 Jan 001 1 0 400-year boundary
# 2 Jan 001 2 1
# 3 Jan 001 3 2
# ...
# 31 Dec 400 _DI400Y _DI400Y -1
# 1 Jan 401 _DI400Y +1 _DI400Y 400-year boundary
n -= 1
n400, n = divmod(n, _DI400Y)
year = n400 * 400 + 1 # ..., -399, 1, 401, ...
# Now n is the (non-negative) offset, in days, from January 1 of year, to
# the desired date. Now compute how many 100-year cycles precede n.
# Note that it's possible for n100 to equal 4! In that case 4 full
# 100-year cycles precede the desired day, which implies the desired
# day is December 31 at the end of a 400-year cycle.
n100, n = divmod(n, _DI100Y)
# Now compute how many 4-year cycles precede it.
n4, n = divmod(n, _DI4Y)
# And now how many single years. Again n1 can be 4, and again meaning
# that the desired day is December 31 at the end of the 4-year cycle.
n1, n = divmod(n, 365)
year += n100 * 100 + n4 * 4 + n1
if n1 == 4 or n100 == 4:
assert n == 0
return year-1, 12, 31
# Now the year is correct, and n is the offset from January 1. We find
# the month via an estimate that's either exact or one too large.
leapyear = n1 == 3 and (n4 != 24 or n100 == 3)
assert leapyear == _is_leap(year)
month = (n + 50) >> 5
preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear)
if preceding > n: # estimate is too large
month -= 1
preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear)
n -= preceding
assert 0 <= n < _days_in_month(year, month)
# Now the year and month are correct, and n is the offset from the
# start of that month: we're done!
return year, month, n+1
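# Illustrative sketch: _ymd2ord() and _ord2ymd() are exact inverses over the
# supported range; 1-Jan-0001 is ordinal 1 and 31-Dec-9999 is ordinal 3652059.
def _example_ordinal_roundtrip():
    assert _ymd2ord(1, 1, 1) == 1
    assert _ord2ymd(_ymd2ord(2000, 2, 29)) == (2000, 2, 29)
    return _ymd2ord(9999, 12, 31)  # 3652059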
# Month and day names. For localized versions, see the calendar module.
_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def _build_struct_time(y, m, d, hh, mm, ss, dstflag):
wday = (_ymd2ord(y, m, d) + 6) % 7
dnum = _days_before_month(y, m) + d
return _time.struct_time((y, m, d, hh, mm, ss, wday, dnum, dstflag))
def _format_time(hh, mm, ss, us):
# Skip trailing microseconds when us==0.
result = "%02d:%02d:%02d" % (hh, mm, ss)
if us:
result += ".%06d" % us
return result
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
year = timetuple[0]
if year < 1900:
raise ValueError("year=%d is before 1900; the datetime strftime() "
"methods require year >= 1900" % year)
# Don't call _utcoffset() or tzname() unless actually needed.
zreplace = None # the string to use for %z
Zreplace = None # the string to use for %Z
# Scan format for %z and %Z escapes, replacing as needed.
newformat = []
push = newformat.append
i, n = 0, len(format)
while i < n:
ch = format[i]
i += 1
if ch == '%':
if i < n:
ch = format[i]
i += 1
if ch == 'z':
if zreplace is None:
zreplace = ""
if hasattr(object, "_utcoffset"):
offset = object._utcoffset()
if offset is not None:
sign = '+'
if offset < 0:
offset = -offset
sign = '-'
h, m = divmod(offset, 60)
zreplace = '%c%02d%02d' % (sign, h, m)
assert '%' not in zreplace
newformat.append(zreplace)
elif ch == 'Z':
if Zreplace is None:
Zreplace = ""
if hasattr(object, "tzname"):
s = object.tzname()
if s is not None:
# strftime is going to have at this: escape %
Zreplace = s.replace('%', '%%')
newformat.append(Zreplace)
else:
push('%')
push(ch)
else:
push('%')
else:
push(ch)
newformat = "".join(newformat)
return _time.strftime(newformat, timetuple)
def _call_tzinfo_method(tzinfo, methname, tzinfoarg):
if tzinfo is None:
return None
return getattr(tzinfo, methname)(tzinfoarg)
# Just raise TypeError if the arg isn't None or a string.
def _check_tzname(name):
if name is not None and not isinstance(name, str):
raise TypeError("tzinfo.tzname() must return None or string, "
"not '%s'" % type(name))
# name is the offset-producing method, "utcoffset" or "dst".
# offset is what it returned.
# If offset isn't None or timedelta, raises TypeError.
# If offset is None, returns None.
# Else offset is checked for being in range, and a whole # of minutes.
# If it is, its integer value is returned. Else ValueError is raised.
def _check_utc_offset(name, offset):
assert name in ("utcoffset", "dst")
if offset is None:
return None
if not isinstance(offset, timedelta):
raise TypeError("tzinfo.%s() must return None "
"or timedelta, not '%s'" % (name, type(offset)))
days = offset.days
if days < -1 or days > 0:
offset = 1440 # trigger out-of-range
else:
seconds = days * 86400 + offset.seconds
minutes, seconds = divmod(seconds, 60)
if seconds or offset.microseconds:
raise ValueError("tzinfo.%s() must return a whole number "
"of minutes" % name)
offset = minutes
if -1440 < offset < 1440:
return offset
raise ValueError("%s()=%d, must be in -1439..1439" % (name, offset))
def _check_date_fields(year, month, day):
if not MINYEAR <= year <= MAXYEAR:
raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
if not 1 <= month <= 12:
raise ValueError('month must be in 1..12', month)
dim = _days_in_month(year, month)
if not 1 <= day <= dim:
raise ValueError('day must be in 1..%d' % dim, day)
def _check_time_fields(hour, minute, second, microsecond):
if not 0 <= hour <= 23:
raise ValueError('hour must be in 0..23', hour)
if not 0 <= minute <= 59:
raise ValueError('minute must be in 0..59', minute)
if not 0 <= second <= 59:
raise ValueError('second must be in 0..59', second)
if not 0 <= microsecond <= 999999:
raise ValueError('microsecond must be in 0..999999', microsecond)
def _check_tzinfo_arg(tz):
if tz is not None and not isinstance(tz, tzinfo):
raise TypeError("tzinfo argument must be None or of a tzinfo subclass")
# Notes on comparison: In general, datetime module comparison operators raise
# TypeError when they don't know how to do a comparison themself. If they
# returned NotImplemented instead, comparison could (silently) fall back to
# the default compare-objects-by-comparing-their-memory-addresses strategy,
# and that's not helpful. There are two exceptions:
#
# 1. For date and datetime, if the other object has a "timetuple" attr,
# NotImplemented is returned. This is a hook to allow other kinds of
# datetime-like objects a chance to intercept the comparison.
#
# 2. Else __eq__ and __ne__ return False and True, respectively. This is
#    so operations like
#
# x == y
# x != y
# x in sequence
# x not in sequence
# dict[x] = y
#
# don't raise annoying TypeErrors just because a datetime object
# is part of a heterogeneous collection. If there's no known way to
# compare X to a datetime, saying they're not equal is reasonable.
def _cmperror(x, y):
raise TypeError("can't compare '%s' to '%s'" % (
type(x).__name__, type(y).__name__))
# This is a start at a struct tm workalike. Goals:
#
# + Works the same way across platforms.
# + Handles all the fields datetime needs handled, without 1970-2038 glitches.
#
# Note: I suspect it's best if this flavor of tm does *not* try to
# second-guess timezones or DST. Instead fold whatever adjustments you want
# into the minutes argument (and the constructor will normalize).
_ORD1970 = _ymd2ord(1970, 1, 1) # base ordinal for UNIX epoch
class tmxxx:
ordinal = None
def __init__(self, year, month, day, hour=0, minute=0, second=0,
microsecond=0):
# Normalize all the inputs, and store the normalized values.
if not 0 <= microsecond <= 999999:
carry, microsecond = divmod(microsecond, 1000000)
second += carry
if not 0 <= second <= 59:
carry, second = divmod(second, 60)
minute += carry
if not 0 <= minute <= 59:
carry, minute = divmod(minute, 60)
hour += carry
if not 0 <= hour <= 23:
carry, hour = divmod(hour, 24)
day += carry
# That was easy. Now it gets muddy: the proper range for day
# can't be determined without knowing the correct month and year,
# but if day is, e.g., plus or minus a million, the current month
# and year values make no sense (and may also be out of bounds
# themselves).
# Saying 12 months == 1 year should be non-controversial.
if not 1 <= month <= 12:
carry, month = divmod(month-1, 12)
year += carry
month += 1
assert 1 <= month <= 12
# Now only day can be out of bounds (year may also be out of bounds
# for a datetime object, but we don't care about that here).
# If day is out of bounds, what to do is arguable, but at least the
# method here is principled and explainable.
dim = _days_in_month(year, month)
if not 1 <= day <= dim:
# Move day-1 days from the first of the month. First try to
# get off cheap if we're only one day out of range (adjustments
# for timezone alone can't be worse than that).
if day == 0: # move back a day
month -= 1
if month > 0:
day = _days_in_month(year, month)
else:
year, month, day = year-1, 12, 31
elif day == dim + 1: # move forward a day
month += 1
day = 1
if month > 12:
month = 1
year += 1
else:
self.ordinal = _ymd2ord(year, month, 1) + (day - 1)
year, month, day = _ord2ymd(self.ordinal)
self.year, self.month, self.day = year, month, day
self.hour, self.minute, self.second = hour, minute, second
self.microsecond = microsecond
def toordinal(self):
"""Return proleptic Gregorian ordinal for the year, month and day.
January 1 of year 1 is day 1. Only the year, month and day values
contribute to the result.
"""
if self.ordinal is None:
self.ordinal = _ymd2ord(self.year, self.month, self.day)
return self.ordinal
def time(self):
"Return Unixish timestamp, as a float (assuming UTC)."
days = self.toordinal() - _ORD1970 # convert to UNIX epoch
seconds = ((days * 24. + self.hour)*60. + self.minute)*60.
return seconds + self.second + self.microsecond / 1e6
def ctime(self):
"Return ctime() style string."
weekday = self.toordinal() % 7 or 7
return "%s %s %2d %02d:%02d:%02d %04d" % (
_DAYNAMES[weekday],
_MONTHNAMES[self.month],
self.day,
self.hour, self.minute, self.second,
self.year)
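# Illustrative sketch: tmxxx normalizes out-of-range fields on construction,
# which is how the date/datetime arithmetic further down carries overflow
# across month and year boundaries.
def _example_tmxxx_normalize():
    t = tmxxx(2003, 12, 31, hour=25)  # 25:00 on 31-Dec rolls into 1-Jan
    return t.year, t.month, t.day, t.hour  # (2004, 1, 1, 1)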
class timedelta(object):
"""Represent the difference between two datetime objects.
Supported operators:
- add, subtract timedelta
- unary plus, minus, abs
- compare to timedelta
- multiply, divide by int/long
In addition, datetime supports subtraction of two datetime objects
returning a timedelta, and addition or subtraction of a datetime
and a timedelta giving a datetime.
Representation: (days, seconds, microseconds). Why? Because I
felt like it.
"""
def __new__(cls, days=0, seconds=0, microseconds=0,
# XXX The following should only be used as keyword args:
milliseconds=0, minutes=0, hours=0, weeks=0):
# Doing this efficiently and accurately in C is going to be difficult
# and error-prone, due to ubiquitous overflow possibilities, and that
# C double doesn't have enough bits of precision to represent
# microseconds over 10K years faithfully. The code here tries to make
# explicit where go-fast assumptions can be relied on, in order to
# guide the C implementation; it's way more convoluted than speed-
# ignoring auto-overflow-to-long idiomatic Python could be.
# XXX Check that all inputs are ints, longs or floats.
# Final values, all integer.
# s and us fit in 32-bit signed ints; d isn't bounded.
d = s = us = 0
# Normalize everything to days, seconds, microseconds.
days += weeks*7
seconds += minutes*60 + hours*3600
microseconds += milliseconds*1000
# Get rid of all fractions, and normalize s and us.
# Take a deep breath <wink>.
if isinstance(days, float):
dayfrac, days = _math.modf(days)
daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.))
assert daysecondswhole == int(daysecondswhole) # can't overflow
s = int(daysecondswhole)
assert days == long(days)
d = long(days)
else:
daysecondsfrac = 0.0
d = days
assert isinstance(daysecondsfrac, float)
assert abs(daysecondsfrac) <= 1.0
assert isinstance(d, (int, long))
assert abs(s) <= 24 * 3600
# days isn't referenced again before redefinition
if isinstance(seconds, float):
secondsfrac, seconds = _math.modf(seconds)
assert seconds == long(seconds)
seconds = long(seconds)
secondsfrac += daysecondsfrac
assert abs(secondsfrac) <= 2.0
else:
secondsfrac = daysecondsfrac
# daysecondsfrac isn't referenced again
assert isinstance(secondsfrac, float)
assert abs(secondsfrac) <= 2.0
assert isinstance(seconds, (int, long))
days, seconds = divmod(seconds, 24*3600)
d += days
s += int(seconds) # can't overflow
assert isinstance(s, int)
assert abs(s) <= 2 * 24 * 3600
# seconds isn't referenced again before redefinition
usdouble = secondsfrac * 1e6
assert abs(usdouble) < 2.1e6 # exact value not critical
# secondsfrac isn't referenced again
if isinstance(microseconds, float):
microseconds += usdouble
microseconds = round(microseconds)
seconds, microseconds = divmod(microseconds, 1e6)
assert microseconds == int(microseconds)
assert seconds == long(seconds)
days, seconds = divmod(seconds, 24.*3600.)
assert days == long(days)
assert seconds == int(seconds)
d += long(days)
s += int(seconds) # can't overflow
assert isinstance(s, int)
assert abs(s) <= 3 * 24 * 3600
else:
seconds, microseconds = divmod(microseconds, 1000000)
days, seconds = divmod(seconds, 24*3600)
d += days
s += int(seconds) # can't overflow
assert isinstance(s, int)
assert abs(s) <= 3 * 24 * 3600
microseconds = float(microseconds)
microseconds += usdouble
microseconds = round(microseconds)
assert abs(s) <= 3 * 24 * 3600
assert abs(microseconds) < 3.1e6
# Just a little bit of carrying possible for microseconds and seconds.
assert isinstance(microseconds, float)
assert int(microseconds) == microseconds
us = int(microseconds)
seconds, us = divmod(us, 1000000)
        s += seconds # can't overflow
assert isinstance(s, int)
days, s = divmod(s, 24*3600)
d += days
assert isinstance(d, (int, long))
assert isinstance(s, int) and 0 <= s < 24*3600
assert isinstance(us, int) and 0 <= us < 1000000
self = object.__new__(cls)
self.__days = d
self.__seconds = s
self.__microseconds = us
if abs(d) > 999999999:
raise OverflowError("timedelta # of days is too large: %d" % d)
return self
def __repr__(self):
if self.__microseconds:
return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
self.__days,
self.__seconds,
self.__microseconds)
if self.__seconds:
return "%s(%d, %d)" % ('datetime.' + self.__class__.__name__,
self.__days,
self.__seconds)
return "%s(%d)" % ('datetime.' + self.__class__.__name__, self.__days)
def __str__(self):
mm, ss = divmod(self.__seconds, 60)
hh, mm = divmod(mm, 60)
s = "%d:%02d:%02d" % (hh, mm, ss)
if self.__days:
def plural(n):
return n, abs(n) != 1 and "s" or ""
s = ("%d day%s, " % plural(self.__days)) + s
if self.__microseconds:
s = s + ".%06d" % self.__microseconds
return s
days = property(lambda self: self.__days, doc="days")
seconds = property(lambda self: self.__seconds, doc="seconds")
microseconds = property(lambda self: self.__microseconds,
doc="microseconds")
def __add__(self, other):
if isinstance(other, timedelta):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(self.__days + other.__days,
self.__seconds + other.__seconds,
self.__microseconds + other.__microseconds)
return NotImplemented
__radd__ = __add__
def __sub__(self, other):
if isinstance(other, timedelta):
return self + -other
return NotImplemented
def __rsub__(self, other):
if isinstance(other, timedelta):
return -self + other
return NotImplemented
def __neg__(self):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(-self.__days,
-self.__seconds,
-self.__microseconds)
def __pos__(self):
return self
def __abs__(self):
if self.__days < 0:
return -self
else:
return self
def __mul__(self, other):
if isinstance(other, (int, long)):
# for CPython compatibility, we cannot use
# our __class__ here, but need a real timedelta
return timedelta(self.__days * other,
self.__seconds * other,
self.__microseconds * other)
return NotImplemented
__rmul__ = __mul__
def __div__(self, other):
if isinstance(other, (int, long)):
usec = ((self.__days * (24*3600L) + self.__seconds) * 1000000 +
self.__microseconds)
return timedelta(0, 0, usec // other)
return NotImplemented
__floordiv__ = __div__
# Comparisons.
def __eq__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) == 0
else:
return False
def __ne__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) != 0
else:
return True
def __le__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) <= 0
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) < 0
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) >= 0
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, timedelta):
return self.__cmp(other) > 0
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, timedelta)
return cmp(self.__getstate(), other.__getstate())
def __hash__(self):
return hash(self.__getstate())
def __nonzero__(self):
return (self.__days != 0 or
self.__seconds != 0 or
self.__microseconds != 0)
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
return (self.__days, self.__seconds, self.__microseconds)
def __reduce__(self):
return (self.__class__, self.__getstate())
timedelta.min = timedelta(-999999999)
timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59,
microseconds=999999)
timedelta.resolution = timedelta(microseconds=1)
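# Illustrative sketch: the (days, seconds, microseconds) representation is
# always normalized so 0 <= seconds < 86400, with the sign pushed into days.
def _example_timedelta_normalize():
    d = timedelta(hours=1, minutes=30) - timedelta(hours=2)
    return d.days, d.seconds  # (-1, 84600), i.e. minus 30 minutes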
class date(object):
"""Concrete date type.
Constructors:
__new__()
fromtimestamp()
today()
fromordinal()
Operators:
__repr__, __str__
__cmp__, __hash__
__add__, __radd__, __sub__ (add/radd only with timedelta arg)
Methods:
timetuple()
toordinal()
weekday()
isoweekday(), isocalendar(), isoformat()
ctime()
strftime()
Properties (readonly):
year, month, day
"""
def __new__(cls, year, month=None, day=None):
"""Constructor.
Arguments:
year, month, day (required, base 1)
"""
if isinstance(year, str):
# Pickle support
self = object.__new__(cls)
self.__setstate(year)
return self
_check_date_fields(year, month, day)
self = object.__new__(cls)
self.__year = year
self.__month = month
self.__day = day
return self
# Additional constructors
def fromtimestamp(cls, t):
"Construct a date from a POSIX timestamp (like time.time())."
y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t)
return cls(y, m, d)
fromtimestamp = classmethod(fromtimestamp)
def today(cls):
"Construct a date from time.time()."
t = _time.time()
return cls.fromtimestamp(t)
today = classmethod(today)
def fromordinal(cls, n):
"""Contruct a date from a proleptic Gregorian ordinal.
January 1 of year 1 is day 1. Only the year, month and day are
non-zero in the result.
"""
y, m, d = _ord2ymd(n)
return cls(y, m, d)
fromordinal = classmethod(fromordinal)
# Conversions to string
def __repr__(self):
"Convert to formal string, for repr()."
return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
self.__year,
self.__month,
self.__day)
# XXX These shouldn't depend on time.localtime(), because that
# clips the usable dates to [1970 .. 2038). At least ctime() is
# easily done without using strftime() -- that's better too because
# strftime("%c", ...) is locale specific.
def ctime(self):
"Format a la ctime()."
return tmxxx(self.__year, self.__month, self.__day).ctime()
def strftime(self, fmt):
"Format using strftime()."
return _wrap_strftime(self, fmt, self.timetuple())
def isoformat(self):
"""Return the date formatted according to ISO.
This is 'YYYY-MM-DD'.
References:
- http://www.w3.org/TR/NOTE-datetime
- http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
return "%04d-%02d-%02d" % (self.__year, self.__month, self.__day)
__str__ = isoformat
# Read-only field accessors
year = property(lambda self: self.__year,
doc="year (%d-%d)" % (MINYEAR, MAXYEAR))
month = property(lambda self: self.__month, doc="month (1-12)")
day = property(lambda self: self.__day, doc="day (1-31)")
# Standard conversions, __cmp__, __hash__ (and helpers)
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
return _build_struct_time(self.__year, self.__month, self.__day,
0, 0, 0, -1)
def toordinal(self):
"""Return proleptic Gregorian ordinal for the year, month and day.
January 1 of year 1 is day 1. Only the year, month and day values
contribute to the result.
"""
return _ymd2ord(self.__year, self.__month, self.__day)
def replace(self, year=None, month=None, day=None):
"""Return a new date with new values for the specified fields."""
if year is None:
year = self.__year
if month is None:
month = self.__month
if day is None:
day = self.__day
_check_date_fields(year, month, day)
return date(year, month, day)
# Comparisons.
def __eq__(self, other):
if isinstance(other, date):
return self.__cmp(other) == 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
return False
def __ne__(self, other):
if isinstance(other, date):
return self.__cmp(other) != 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
return True
def __le__(self, other):
if isinstance(other, date):
return self.__cmp(other) <= 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, date):
return self.__cmp(other) < 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, date):
return self.__cmp(other) >= 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, date):
return self.__cmp(other) > 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, date)
y, m, d = self.__year, self.__month, self.__day
y2, m2, d2 = other.__year, other.__month, other.__day
return cmp((y, m, d), (y2, m2, d2))
def __hash__(self):
"Hash."
return hash(self.__getstate())
# Computations
def _checkOverflow(self, year):
if not MINYEAR <= year <= MAXYEAR:
raise OverflowError("date +/-: result year %d not in %d..%d" %
(year, MINYEAR, MAXYEAR))
def __add__(self, other):
"Add a date to a timedelta."
if isinstance(other, timedelta):
t = tmxxx(self.__year,
self.__month,
self.__day + other.days)
self._checkOverflow(t.year)
result = date(t.year, t.month, t.day)
return result
raise TypeError
# XXX Should be 'return NotImplemented', but there's a bug in 2.2...
__radd__ = __add__
def __sub__(self, other):
"""Subtract two dates, or a date and a timedelta."""
if isinstance(other, timedelta):
return self + timedelta(-other.days)
if isinstance(other, date):
days1 = self.toordinal()
days2 = other.toordinal()
return timedelta(days1 - days2)
return NotImplemented
def weekday(self):
"Return day of the week, where Monday == 0 ... Sunday == 6."
return (self.toordinal() + 6) % 7
# Day-of-the-week and week-of-the-year, according to ISO
def isoweekday(self):
"Return day of the week, where Monday == 1 ... Sunday == 7."
# 1-Jan-0001 is a Monday
return self.toordinal() % 7 or 7
def isocalendar(self):
"""Return a 3-tuple containing ISO year, week number, and weekday.
The first ISO week of the year is the (Mon-Sun) week
containing the year's first Thursday; everything else derives
from that.
The first week is 1; Monday is 1 ... Sunday is 7.
ISO calendar algorithm taken from
http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
"""
year = self.__year
week1monday = _isoweek1monday(year)
today = _ymd2ord(self.__year, self.__month, self.__day)
# Internally, week and day have origin 0
week, day = divmod(today - week1monday, 7)
if week < 0:
year -= 1
week1monday = _isoweek1monday(year)
week, day = divmod(today - week1monday, 7)
elif week >= 52:
if today >= _isoweek1monday(year+1):
year += 1
week = 0
return year, week+1, day+1
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
yhi, ylo = divmod(self.__year, 256)
return ("%c%c%c%c" % (yhi, ylo, self.__month, self.__day), )
def __setstate(self, string):
if len(string) != 4 or not (1 <= ord(string[2]) <= 12):
raise TypeError("not enough arguments")
yhi, ylo, self.__month, self.__day = map(ord, string)
self.__year = yhi * 256 + ylo
def __reduce__(self):
return (self.__class__, self.__getstate())
def __tojava__(self, java_class):
from java.lang import Object
from java.sql import Date
from java.util import Calendar
from org.python.core import Py
if java_class not in (Calendar, Date, Object):
return Py.NoConversion
calendar = Calendar.getInstance()
calendar.clear()
calendar.set(self.year, self.month - 1, self.day)
if java_class == Calendar:
return calendar
else:
return Date(calendar.getTimeInMillis())
_date_class = date # so functions w/ args named "date" can get at the class
date.min = date(1, 1, 1)
date.max = date(9999, 12, 31)
date.resolution = timedelta(days=1)
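# Illustrative sketch: date arithmetic rides on the ordinal helpers above,
# so leap days fall out naturally.
def _example_date_arithmetic():
    d = date(2004, 2, 28) + timedelta(days=2)  # 2004 is a leap year
    return d.isoformat(), d.weekday()          # ('2004-03-01', 0) -- a Monday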
class tzinfo(object):
"""Abstract base class for time zone info classes.
Subclasses must override the name(), utcoffset() and dst() methods.
"""
def tzname(self, dt):
"datetime -> string name of time zone."
raise NotImplementedError("tzinfo subclass must override tzname()")
def utcoffset(self, dt):
"datetime -> minutes east of UTC (negative for west of UTC)"
raise NotImplementedError("tzinfo subclass must override utcoffset()")
def dst(self, dt):
"""datetime -> DST offset in minutes east of UTC.
Return 0 if DST not in effect. utcoffset() must include the DST
offset.
"""
raise NotImplementedError("tzinfo subclass must override dst()")
def fromutc(self, dt):
"datetime in UTC -> datetime in local time."
if not isinstance(dt, datetime):
raise TypeError("fromutc() requires a datetime argument")
if dt.tzinfo is not self:
raise ValueError("dt.tzinfo is not self")
dtoff = dt.utcoffset()
if dtoff is None:
raise ValueError("fromutc() requires a non-None utcoffset() "
"result")
# See the long comment block at the end of this file for an
# explanation of this algorithm.
dtdst = dt.dst()
if dtdst is None:
raise ValueError("fromutc() requires a non-None dst() result")
delta = dtoff - dtdst
if delta:
dt += delta
dtdst = dt.dst()
if dtdst is None:
raise ValueError("fromutc(): dt.dst gave inconsistent "
"results; cannot convert")
if dtdst:
return dt + dtdst
else:
return dt
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __reduce__(self):
getinitargs = getattr(self, "__getinitargs__", None)
if getinitargs:
args = getinitargs()
else:
args = ()
getstate = getattr(self, "__getstate__", None)
if getstate:
state = getstate()
else:
state = getattr(self, "__dict__", None) or None
if state is None:
return (self.__class__, args)
else:
return (self.__class__, args, state)
_tzinfo_class = tzinfo # so functions w/ args named "tzinfo" can get at it
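# Illustrative sketch of the contract documented above: a minimal concrete
# tzinfo with a fixed offset and no DST. The class name "FixedOffset" is our
# own, not part of this module; the time/datetime examples further down use it.
class FixedOffset(tzinfo):
    def __init__(self, minutes, name):
        self.__offset = timedelta(minutes=minutes)
        self.__name = name
    def utcoffset(self, dt):
        return self.__offset
    def dst(self, dt):
        return timedelta(0)  # fixed-offset zones have no DST component
    def tzname(self, dt):
        return self.__name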
class time(object):
"""Time with time zone.
Constructors:
__new__()
Operators:
__repr__, __str__
__cmp__, __hash__
Methods:
strftime()
isoformat()
utcoffset()
tzname()
dst()
Properties (readonly):
hour, minute, second, microsecond, tzinfo
"""
def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
"""Constructor.
Arguments:
hour, minute (required)
second, microsecond (default to zero)
tzinfo (default to None)
"""
self = object.__new__(cls)
if isinstance(hour, str):
# Pickle support
self.__setstate(hour, minute or None)
return self
_check_tzinfo_arg(tzinfo)
_check_time_fields(hour, minute, second, microsecond)
self.__hour = hour
self.__minute = minute
self.__second = second
self.__microsecond = microsecond
self._tzinfo = tzinfo
return self
# Read-only field accessors
hour = property(lambda self: self.__hour, doc="hour (0-23)")
minute = property(lambda self: self.__minute, doc="minute (0-59)")
second = property(lambda self: self.__second, doc="second (0-59)")
microsecond = property(lambda self: self.__microsecond,
doc="microsecond (0-999999)")
tzinfo = property(lambda self: self._tzinfo, doc="timezone info object")
# Standard conversions, __hash__ (and helpers)
# Comparisons.
def __eq__(self, other):
if isinstance(other, time):
return self.__cmp(other) == 0
else:
return False
def __ne__(self, other):
if isinstance(other, time):
return self.__cmp(other) != 0
else:
return True
def __le__(self, other):
if isinstance(other, time):
return self.__cmp(other) <= 0
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, time):
return self.__cmp(other) < 0
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, time):
return self.__cmp(other) >= 0
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, time):
return self.__cmp(other) > 0
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, time)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
myoff = self._utcoffset()
otoff = other._utcoffset()
base_compare = myoff == otoff
if base_compare:
return cmp((self.__hour, self.__minute, self.__second,
self.__microsecond),
(other.__hour, other.__minute, other.__second,
other.__microsecond))
if myoff is None or otoff is None:
# XXX Buggy in 2.2.2.
raise TypeError("cannot compare naive and aware times")
myhhmm = self.__hour * 60 + self.__minute - myoff
othhmm = other.__hour * 60 + other.__minute - otoff
return cmp((myhhmm, self.__second, self.__microsecond),
(othhmm, other.__second, other.__microsecond))
def __hash__(self):
"""Hash."""
tzoff = self._utcoffset()
if not tzoff: # zero or None
return hash(self.__getstate()[0])
h, m = divmod(self.hour * 60 + self.minute - tzoff, 60)
if 0 <= h < 24:
return hash(time(h, m, self.second, self.microsecond))
return hash((h, m, self.second, self.microsecond))
# Conversion to string
def _tzstr(self, sep=":"):
"""Return formatted timezone offset (+xx:xx) or None."""
off = self._utcoffset()
if off is not None:
if off < 0:
sign = "-"
off = -off
else:
sign = "+"
hh, mm = divmod(off, 60)
assert 0 <= hh < 24
off = "%s%02d%s%02d" % (sign, hh, sep, mm)
return off
def __repr__(self):
"""Convert to formal string, for repr()."""
if self.__microsecond != 0:
s = ", %d, %d" % (self.__second, self.__microsecond)
elif self.__second != 0:
s = ", %d" % self.__second
else:
s = ""
s= "%s(%d, %d%s)" % ('datetime.' + self.__class__.__name__,
self.__hour, self.__minute, s)
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
return s
def isoformat(self):
"""Return the time formatted according to ISO.
This is 'HH:MM:SS.mmmmmm+zz:zz', or 'HH:MM:SS+zz:zz' if
self.microsecond == 0.
"""
s = _format_time(self.__hour, self.__minute, self.__second,
self.__microsecond)
tz = self._tzstr()
if tz:
s += tz
return s
__str__ = isoformat
def strftime(self, fmt):
"""Format using strftime(). The date part of the timestamp passed
to underlying strftime should not be used.
"""
# The year must be >= 1900 else Python's strftime implementation
# can raise a bogus exception.
timetuple = (1900, 1, 1,
self.__hour, self.__minute, self.__second,
0, 1, -1)
return _wrap_strftime(self, fmt, timetuple)
# Timezone functions
def utcoffset(self):
"""Return the timezone offset in minutes east of UTC (negative west of
UTC)."""
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", None)
offset = _check_utc_offset("utcoffset", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
# Return an integer (or None) instead of a timedelta (or None).
def _utcoffset(self):
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", None)
offset = _check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
name = _call_tzinfo_method(self._tzinfo, "tzname", None)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (in minutes
eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
offset = _call_tzinfo_method(self._tzinfo, "dst", None)
offset = _check_utc_offset("dst", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
def replace(self, hour=None, minute=None, second=None, microsecond=None,
tzinfo=True):
"""Return a new time with new values for the specified fields."""
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
_check_time_fields(hour, minute, second, microsecond)
_check_tzinfo_arg(tzinfo)
return time(hour, minute, second, microsecond, tzinfo)
# Return an integer (or None) instead of a timedelta (or None).
def _dst(self):
offset = _call_tzinfo_method(self._tzinfo, "dst", None)
offset = _check_utc_offset("dst", offset)
return offset
def __nonzero__(self):
if self.second or self.microsecond:
return 1
offset = self._utcoffset() or 0
return self.hour * 60 + self.minute - offset != 0
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
us2, us3 = divmod(self.__microsecond, 256)
us1, us2 = divmod(us2, 256)
basestate = ("%c" * 6) % (self.__hour, self.__minute, self.__second,
us1, us2, us3)
if self._tzinfo is None:
return (basestate,)
else:
return (basestate, self._tzinfo)
def __setstate(self, string, tzinfo):
if len(string) != 6 or ord(string[0]) >= 24:
raise TypeError("an integer is required")
self.__hour, self.__minute, self.__second, us1, us2, us3 = \
map(ord, string)
self.__microsecond = (((us1 << 8) | us2) << 8) | us3
self._tzinfo = tzinfo
def __reduce__(self):
return (time, self.__getstate())
def __tojava__(self, java_class):
# TODO, if self.tzinfo is not None, convert time to UTC
from java.lang import Object
from java.sql import Time
from java.util import Calendar
from org.python.core import Py
if java_class not in (Calendar, Time, Object):
return Py.NoConversion
calendar = Calendar.getInstance()
calendar.clear()
calendar.set(Calendar.HOUR_OF_DAY, self.hour)
calendar.set(Calendar.MINUTE, self.minute)
calendar.set(Calendar.SECOND, self.second)
calendar.set(Calendar.MILLISECOND, self.microsecond // 1000)
if java_class == Calendar:
return calendar
else:
return Time(calendar.getTimeInMillis())
_time_class = time # so functions w/ args named "time" can get at the class
time.min = time(0, 0, 0)
time.max = time(23, 59, 59, 999999)
time.resolution = timedelta(microseconds=1)
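# Illustrative sketch: an aware time folds its UTC offset into isoformat(),
# using the FixedOffset example class defined above.
def _example_aware_time():
    t = time(14, 30, tzinfo=FixedOffset(-300, 'EST'))
    return t.isoformat()  # '14:30:00-05:00'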
class datetime(date):
# XXX needs docstrings
# See http://www.zope.org/Members/fdrake/DateTimeWiki/TimeZoneInfo
def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0,
microsecond=0, tzinfo=None):
if isinstance(year, str):
# Pickle support
self = date.__new__(cls, year[:4])
self.__setstate(year, month)
return self
_check_tzinfo_arg(tzinfo)
_check_time_fields(hour, minute, second, microsecond)
self = date.__new__(cls, year, month, day)
# XXX This duplicates __year, __month, __day for convenience :-(
self.__year = year
self.__month = month
self.__day = day
self.__hour = hour
self.__minute = minute
self.__second = second
self.__microsecond = microsecond
self._tzinfo = tzinfo
return self
# Read-only field accessors
hour = property(lambda self: self.__hour, doc="hour (0-23)")
minute = property(lambda self: self.__minute, doc="minute (0-59)")
second = property(lambda self: self.__second, doc="second (0-59)")
microsecond = property(lambda self: self.__microsecond,
doc="microsecond (0-999999)")
tzinfo = property(lambda self: self._tzinfo, doc="timezone info object")
def fromtimestamp(cls, t, tz=None):
"""Construct a datetime from a POSIX timestamp (like time.time()).
A timezone info object may be passed in as well.
"""
_check_tzinfo_arg(tz)
if tz is None:
converter = _time.localtime
else:
converter = _time.gmtime
y, m, d, hh, mm, ss, weekday, jday, dst = converter(t)
us = int((t % 1.0) * 1000000)
if us == 1000001 or us == 999999:
us = 0
rounded = True
else:
rounded = False
ss = min(ss, 59) # clamp out leap seconds if the platform has them
result = cls(y, m, d, hh, mm, ss, us, tz)
if rounded:
result += timedelta(seconds=1)
if tz is not None:
result = tz.fromutc(result)
return result
fromtimestamp = classmethod(fromtimestamp)
def utcfromtimestamp(cls, t):
"Construct a UTC datetime from a POSIX timestamp (like time.time())."
y, m, d, hh, mm, ss, weekday, jday, dst = _time.gmtime(t)
us = int((t % 1.0) * 1000000)
ss = min(ss, 59) # clamp out leap seconds if the platform has them
return cls(y, m, d, hh, mm, ss, us)
utcfromtimestamp = classmethod(utcfromtimestamp)
# XXX This is supposed to do better than we *can* do by using time.time(),
# XXX if the platform supports a more accurate way. The C implementation
# XXX uses gettimeofday on platforms that have it, but that isn't
# XXX available from Python. So now() may return different results
# XXX across the implementations.
def now(cls, tz=None):
"Construct a datetime from time.time() and optional time zone info."
t = _time.time()
return cls.fromtimestamp(t, tz)
now = classmethod(now)
def utcnow(cls):
"Construct a UTC datetime from time.time()."
t = _time.time()
return cls.utcfromtimestamp(t)
utcnow = classmethod(utcnow)
def combine(cls, date, time):
"Construct a datetime from a given date and a given time."
if not isinstance(date, _date_class):
raise TypeError("date argument must be a date instance")
if not isinstance(time, _time_class):
raise TypeError("time argument must be a time instance")
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second, time.microsecond,
time.tzinfo)
combine = classmethod(combine)
def strptime(cls, date_string, format):
"""datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
The year, month and day arguments are required. tzinfo may be None, or an
instance of a tzinfo subclass. The remaining arguments may be ints or longs."""
return cls(*(_time.strptime(date_string, format))[0:6])
strptime = classmethod(strptime)
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
dst = self._dst()
if dst is None:
dst = -1
elif dst:
dst = 1
return _build_struct_time(self.year, self.month, self.day,
self.hour, self.minute, self.second,
dst)
def utctimetuple(self):
"Return UTC time tuple compatible with time.gmtime()."
y, m, d = self.year, self.month, self.day
hh, mm, ss = self.hour, self.minute, self.second
offset = self._utcoffset()
if offset: # neither None nor 0
tm = tmxxx(y, m, d, hh, mm - offset)
y, m, d = tm.year, tm.month, tm.day
hh, mm = tm.hour, tm.minute
return _build_struct_time(y, m, d, hh, mm, ss, 0)
def date(self):
"Return the date part."
return date(self.__year, self.__month, self.__day)
def time(self):
"Return the time part, with tzinfo None."
return time(self.hour, self.minute, self.second, self.microsecond)
def timetz(self):
"Return the time part, with same tzinfo."
return time(self.hour, self.minute, self.second, self.microsecond,
self._tzinfo)
def replace(self, year=None, month=None, day=None, hour=None,
minute=None, second=None, microsecond=None, tzinfo=True):
"""Return a new datetime with new values for the specified fields."""
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
_check_date_fields(year, month, day)
_check_time_fields(hour, minute, second, microsecond)
_check_tzinfo_arg(tzinfo)
return datetime(year, month, day, hour, minute, second,
microsecond, tzinfo)
def astimezone(self, tz):
if not isinstance(tz, tzinfo):
raise TypeError("tz argument must be an instance of tzinfo")
mytz = self.tzinfo
if mytz is None:
raise ValueError("astimezone() requires an aware datetime")
if tz is mytz:
return self
# Convert self to UTC, and attach the new time zone object.
myoffset = self.utcoffset()
if myoffset is None:
raise ValueError("astimezone() requires an aware datetime")
utc = (self - myoffset).replace(tzinfo=tz)
# Convert from UTC to tz's local time.
return tz.fromutc(utc)
# Ways to produce a string.
def ctime(self):
"Format a la ctime()."
t = tmxxx(self.__year, self.__month, self.__day, self.__hour,
self.__minute, self.__second)
return t.ctime()
def isoformat(self, sep='T'):
"""Return the time formatted according to ISO.
This is 'YYYY-MM-DD HH:MM:SS.mmmmmm', or 'YYYY-MM-DD HH:MM:SS' if
self.microsecond == 0.
If self.tzinfo is not None, the UTC offset is also attached, giving
'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM' or 'YYYY-MM-DD HH:MM:SS+HH:MM'.
Optional argument sep specifies the separator between date and
time, default 'T'.
"""
s = ("%04d-%02d-%02d%c" % (self.__year, self.__month, self.__day,
sep) +
_format_time(self.__hour, self.__minute, self.__second,
self.__microsecond))
off = self._utcoffset()
if off is not None:
if off < 0:
sign = "-"
off = -off
else:
sign = "+"
hh, mm = divmod(off, 60)
s += "%s%02d:%02d" % (sign, hh, mm)
return s
def __repr__(self):
"Convert to formal string, for repr()."
L = [self.__year, self.__month, self.__day, # These are never zero
self.__hour, self.__minute, self.__second, self.__microsecond]
if L[-1] == 0:
del L[-1]
if L[-1] == 0:
del L[-1]
s = ", ".join(map(str, L))
s = "%s(%s)" % ('datetime.' + self.__class__.__name__, s)
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
return s
def __str__(self):
"Convert to string, for str()."
return self.isoformat(sep=' ')
def utcoffset(self):
"""Return the timezone offset in minutes east of UTC (negative west of
UTC)."""
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", self)
offset = _check_utc_offset("utcoffset", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
# Return an integer (or None) instead of a timedelta (or None).
def _utcoffset(self):
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", self)
offset = _check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
name = _call_tzinfo_method(self._tzinfo, "tzname", self)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (in minutes
eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
offset = _call_tzinfo_method(self._tzinfo, "dst", self)
offset = _check_utc_offset("dst", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
    # Return an integer (or None) instead of a timedelta (or None).
def _dst(self):
offset = _call_tzinfo_method(self._tzinfo, "dst", self)
offset = _check_utc_offset("dst", offset)
return offset
# Comparisons.
def __eq__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) == 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
return False
def __ne__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) != 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
return True
def __le__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) <= 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) < 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) >= 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) > 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, datetime)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
if mytz is not None:
myoff = self._utcoffset()
if ottz is not None:
otoff = other._utcoffset()
base_compare = myoff == otoff
if base_compare:
return cmp((self.__year, self.__month, self.__day,
self.__hour, self.__minute, self.__second,
self.__microsecond),
(other.__year, other.__month, other.__day,
other.__hour, other.__minute, other.__second,
other.__microsecond))
if myoff is None or otoff is None:
# XXX Buggy in 2.2.2.
raise TypeError("cannot compare naive and aware datetimes")
# XXX What follows could be done more efficiently...
diff = self - other # this will take offsets into account
if diff.days < 0:
return -1
return diff and 1 or 0
def __add__(self, other):
"Add a datetime and a timedelta."
if not isinstance(other, timedelta):
return NotImplemented
t = tmxxx(self.__year,
self.__month,
self.__day + other.days,
self.__hour,
self.__minute,
self.__second + other.seconds,
self.__microsecond + other.microseconds)
self._checkOverflow(t.year)
result = datetime(t.year, t.month, t.day,
t.hour, t.minute, t.second,
t.microsecond, tzinfo=self._tzinfo)
return result
__radd__ = __add__
def __sub__(self, other):
"Subtract two datetimes, or a datetime and a timedelta."
if not isinstance(other, datetime):
if isinstance(other, timedelta):
return self + -other
return NotImplemented
days1 = self.toordinal()
days2 = other.toordinal()
secs1 = self.__second + self.__minute * 60 + self.__hour * 3600
secs2 = other.__second + other.__minute * 60 + other.__hour * 3600
base = timedelta(days1 - days2,
secs1 - secs2,
self.__microsecond - other.__microsecond)
if self._tzinfo is other._tzinfo:
return base
myoff = self._utcoffset()
otoff = other._utcoffset()
if myoff == otoff:
return base
if myoff is None or otoff is None:
            raise TypeError("cannot mix naive and timezone-aware time")
return base + timedelta(minutes = otoff-myoff)
def __hash__(self):
tzoff = self._utcoffset()
if tzoff is None:
return hash(self.__getstate()[0])
days = _ymd2ord(self.year, self.month, self.day)
seconds = self.hour * 3600 + (self.minute - tzoff) * 60 + self.second
return hash(timedelta(days, seconds, self.microsecond))
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
yhi, ylo = divmod(self.__year, 256)
us2, us3 = divmod(self.__microsecond, 256)
us1, us2 = divmod(us2, 256)
basestate = ("%c" * 10) % (yhi, ylo, self.__month, self.__day,
self.__hour, self.__minute, self.__second,
us1, us2, us3)
if self._tzinfo is None:
return (basestate,)
else:
return (basestate, self._tzinfo)
def __setstate(self, string, tzinfo):
(yhi, ylo, self.__month, self.__day, self.__hour,
self.__minute, self.__second, us1, us2, us3) = map(ord, string)
self.__year = yhi * 256 + ylo
self.__microsecond = (((us1 << 8) | us2) << 8) | us3
self._tzinfo = tzinfo
def __reduce__(self):
return (self.__class__, self.__getstate())
def __tojava__(self, java_class):
# TODO, if self.tzinfo is not None, convert time to UTC
from java.lang import Object
from java.sql import Timestamp
from java.util import Calendar
from org.python.core import Py
if java_class not in (Calendar, Timestamp, Object):
return Py.NoConversion
calendar = Calendar.getInstance()
calendar.clear()
calendar.set(self.year, self.month - 1, self.day,
self.hour, self.minute, self.second)
if java_class == Calendar:
calendar.set(Calendar.MILLISECOND, self.microsecond // 1000)
return calendar
else:
timestamp = Timestamp(calendar.getTimeInMillis())
timestamp.setNanos(self.microsecond * 1000)
return timestamp
datetime.min = datetime(1, 1, 1)
datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999)
datetime.resolution = timedelta(microseconds=1)
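# Illustrative sketch: astimezone() converts an aware datetime by shifting to
# UTC and handing the result to the target zone's fromutc(); FixedOffset is
# the example class defined above.
def _example_astimezone():
    est = FixedOffset(-300, 'EST')
    utc = FixedOffset(0, 'UTC')
    dt = datetime(2004, 3, 1, 9, 0, tzinfo=est)
    return dt.astimezone(utc).isoformat(' ')  # '2004-03-01 14:00:00+00:00'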
def _isoweek1monday(year):
# Helper to calculate the day number of the Monday starting week 1
# XXX This could be done more efficiently
THURSDAY = 3
firstday = _ymd2ord(year, 1, 1)
firstweekday = (firstday + 6) % 7 # See weekday() above
week1monday = firstday - firstweekday
if firstweekday > THURSDAY:
week1monday += 7
return week1monday
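# Illustrative sketch: 2004 began on a Thursday, so ISO week 1 of 2004 starts
# on Monday 29-Dec-2003 and 1-Jan-2004 is day 4 of that week.
def _example_isocalendar():
    assert date(2003, 12, 29).isocalendar() == (2004, 1, 1)
    return date(2004, 1, 1).isocalendar()  # (2004, 1, 4)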
"""
Some time zone algebra. For a datetime x, let
x.n = x stripped of its timezone -- its naive time.
x.o = x.utcoffset(), and assuming that doesn't raise an exception or
return None
x.d = x.dst(), and assuming that doesn't raise an exception or
return None
x.s = x's standard offset, x.o - x.d
Now some derived rules, where k is a duration (timedelta).
1. x.o = x.s + x.d
This follows from the definition of x.s.
2. If x and y have the same tzinfo member, x.s = y.s.
This is actually a requirement, an assumption we need to make about
sane tzinfo classes.
3. The naive UTC time corresponding to x is x.n - x.o.
This is again a requirement for a sane tzinfo class.
4. (x+k).s = x.s
This follows from #2, and that datetime+timedelta preserves tzinfo.
5. (x+k).n = x.n + k
Again follows from how arithmetic is defined.
Now we can explain tz.fromutc(x). Let's assume it's an interesting case
(meaning that the various tzinfo methods exist, and don't blow up or return
None when called).
The function wants to return a datetime y with timezone tz, equivalent to x.
x is already in UTC.
By #3, we want
y.n - y.o = x.n [1]
The algorithm starts by attaching tz to x.n, and calling that y. So
x.n = y.n at the start. Then it wants to add a duration k to y, so that [1]
becomes true; in effect, we want to solve [2] for k:
(y+k).n - (y+k).o = x.n [2]
By #1, this is the same as
(y+k).n - ((y+k).s + (y+k).d) = x.n [3]
By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start.
Substituting that into [3],
x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving
k - (y+k).s - (y+k).d = 0; rearranging,
k = (y+k).s + (y+k).d; by #4, (y+k).s == y.s, so
k = y.s + (y+k).d
On the RHS, (y+k).d can't be computed directly, but y.s can be, and we
approximate k by ignoring the (y+k).d term at first. Note that k can't be
very large, since all offset-returning methods return a duration of magnitude
less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must
be 0, so ignoring it has no consequence then.
In any case, the new value is
z = y + y.s [4]
It's helpful to step back and look at [4] from a higher level: it's simply
mapping from UTC to tz's standard time.
At this point, if
z.n - z.o = x.n [5]
we have an equivalent time, and are almost done. The insecurity here is
at the start of daylight time. Picture US Eastern for concreteness. The wall
time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good
sense then. The docs ask that an Eastern tzinfo class consider such a time to
be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST
on the day DST starts. We want to return the 1:MM EST spelling because that's
the only spelling that makes sense on the local wall clock.
In fact, if [5] holds at this point, we do have the standard-time spelling,
but that takes a bit of proof. We first prove a stronger result. What's the
difference between the LHS and RHS of [5]? Let
diff = x.n - (z.n - z.o) [6]
Now
    z.n =                      by [4]
    (y + y.s).n =              by #5
    y.n + y.s =                since y.n = x.n
    x.n + y.s =                since z and y have the same tzinfo member,
                               y.s = z.s by #2
    x.n + z.s
Plugging that back into [6] gives
    diff =
    x.n - ((x.n + z.s) - z.o) =    expanding
    x.n - x.n - z.s + z.o =        cancelling
    - z.s + z.o =                  by #1
    z.d
So diff = z.d.
If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time
spelling we wanted in the endcase described above. We're done. Contrarily,
if z.d = 0, then we have a UTC equivalent, and are also done.
If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to
add to z (in effect, z is in tz's standard time, and we need to shift the
local clock into tz's daylight time).
Let
z' = z + z.d = z + diff [7]
and we can again ask whether
z'.n - z'.o = x.n [8]
If so, we're done. If not, the tzinfo class is insane, according to the
assumptions we've made. This also requires a bit of proof. As before, let's
compute the difference between the LHS and RHS of [8] (and skipping some of
the justifications for the kinds of substitutions we've done several times
already):
    diff' = x.n - (z'.n - z'.o) =                  replacing z'.n via [7]
            x.n - (z.n + diff - z'.o) =            replacing diff via [6]
            x.n - (z.n + x.n - (z.n - z.o) - z'.o) =
            x.n - z.n - x.n + z.n - z.o + z'.o =   cancel x.n
            - z.n + z.n - z.o + z'.o =             cancel z.n
            - z.o + z'.o =                         #1 twice
            -z.s - z.d + z'.s + z'.d =             z and z' have same tzinfo
            z'.d - z.d
So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal,
we've found the UTC-equivalent so are done. In fact, we stop with [7] and
return z', not bothering to compute z'.d.
How could z.d and z'.d differ? z' = z + z.d [7], so merely moving z' by
a dst() offset, and starting *from* a time already in DST (we know z.d != 0),
would have to change the result dst() returns: we start in DST, and moving
a little further into it takes us out of DST.
There isn't a sane case where this can happen. The closest it gets is at
the end of DST, where there's an hour in UTC with no spelling in a hybrid
tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During
that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM
UTC) because the docs insist on that, but 0:MM is taken as being in daylight
time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local
clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in
standard time. Since that's what the local clock *does*, we want to map both
UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous
in local time, but so it goes -- it's the way the local clock works.
When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0,
so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going.
z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8]
(correctly) concludes that z' is not UTC-equivalent to x.
Because we know z.d said z was in daylight time (else [5] would have held and
we would have stopped then), and we know z.d != z'.d (else [8] would have held
and we would have stopped then), and there are only 2 possible values dst() can
return in Eastern, it follows that z'.d must be 0 (which it is in the example,
but the reasoning doesn't depend on the example -- it depends on there being
two possible dst() outcomes, one zero and the other non-zero). Therefore
z' must be in standard time, and is the spelling we want in this case.
Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is
concerned (because it takes z' as being in standard time rather than the
daylight time we intend here), but returning it gives the real-life "local
clock repeats an hour" behavior when mapping the "unspellable" UTC hour into
tz.
When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with
the 1:MM standard time spelling we want.
So how can this break? One of the assumptions must be violated. Two
possibilities:
1) [2] effectively says that y.s is invariant across all y belonging to a given
time zone. This isn't true if, for political reasons or continental drift,
a region decides to change its base offset from UTC.
2) There may be versions of "double daylight" time where the tail end of
the analysis gives up a step too early. I haven't thought about that
enough to say.
In any case, it's clear that the default fromutc() is strong enough to handle
"almost all" time zones: so long as the standard offset is invariant, it
doesn't matter if daylight time transition points change from year to year, or
if daylight time is skipped in some years; it doesn't matter how large or
small dst() may get within its bounds; and it doesn't even matter if some
perverse time zone returns a negative dst(). So a breaking case must be
pretty bizarre, and a tzinfo subclass can override fromutc() if it is.
"""
| apache-2.0 | -1,788,848,857,167,292,200 | 34.99615 | 89 | 0.563369 | false |
lodemo/CATANA | src/face_recognition/youtube_dl/extractor/tvp.py | 22 | 8498 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
clean_html,
get_element_by_attribute,
ExtractorError,
)
class TVPIE(InfoExtractor):
IE_NAME = 'tvp'
IE_DESC = 'Telewizja Polska'
_VALID_URL = r'https?://[^/]+\.tvp\.(?:pl|info)/(?:(?!\d+/)[^/]+/)*(?P<id>\d+)'
_TESTS = [{
'url': 'http://vod.tvp.pl/194536/i-seria-odc-13',
'md5': '8aa518c15e5cc32dfe8db400dc921fbb',
'info_dict': {
'id': '194536',
'ext': 'mp4',
'title': 'Czas honoru, I seria – odc. 13',
'description': 'md5:76649d2014f65c99477be17f23a4dead',
},
}, {
'url': 'http://www.tvp.pl/there-can-be-anything-so-i-shortened-it/17916176',
'md5': 'b0005b542e5b4de643a9690326ab1257',
'info_dict': {
'id': '17916176',
'ext': 'mp4',
'title': 'TVP Gorzów pokaże filmy studentów z podroży dookoła świata',
'description': 'TVP Gorzów pokaże filmy studentów z podroży dookoła świata',
},
}, {
# page id is not the same as video id(#7799)
'url': 'http://vod.tvp.pl/22704887/08122015-1500',
'md5': 'cf6a4705dfd1489aef8deb168d6ba742',
'info_dict': {
'id': '22680786',
'ext': 'mp4',
'title': 'Wiadomości, 08.12.2015, 15:00',
},
}, {
'url': 'http://vod.tvp.pl/seriale/obyczajowe/na-sygnale/sezon-2-27-/odc-39/17834272',
'only_matching': True,
}, {
'url': 'http://wiadomosci.tvp.pl/25169746/24052016-1200',
'only_matching': True,
}, {
'url': 'http://krakow.tvp.pl/25511623/25lecie-mck-wyjatkowe-miejsce-na-mapie-krakowa',
'only_matching': True,
}, {
'url': 'http://teleexpress.tvp.pl/25522307/wierni-wzieli-udzial-w-procesjach',
'only_matching': True,
}, {
'url': 'http://sport.tvp.pl/25522165/krychowiak-uspokaja-w-sprawie-kontuzji-dwa-tygodnie-to-maksimum',
'only_matching': True,
}, {
'url': 'http://www.tvp.info/25511919/trwa-rewolucja-wladza-zdecydowala-sie-na-pogwalcenie-konstytucji',
'only_matching': True,
}]
def _real_extract(self, url):
page_id = self._match_id(url)
webpage = self._download_webpage(url, page_id)
video_id = self._search_regex([
r'<iframe[^>]+src="[^"]*?object_id=(\d+)',
r"object_id\s*:\s*'(\d+)'",
r'data-video-id="(\d+)"'], webpage, 'video id', default=page_id)
return {
'_type': 'url_transparent',
'url': 'tvp:' + video_id,
'description': self._og_search_description(webpage, default=None),
'thumbnail': self._og_search_thumbnail(webpage),
'ie_key': 'TVPEmbed',
}
class TVPEmbedIE(InfoExtractor):
IE_NAME = 'tvp:embed'
IE_DESC = 'Telewizja Polska'
_VALID_URL = r'(?:tvp:|https?://[^/]+\.tvp\.(?:pl|info)/sess/tvplayer\.php\?.*?object_id=)(?P<id>\d+)'
_TESTS = [{
'url': 'http://www.tvp.pl/sess/tvplayer.php?object_id=22670268',
'md5': '8c9cd59d16edabf39331f93bf8a766c7',
'info_dict': {
'id': '22670268',
'ext': 'mp4',
'title': 'Panorama, 07.12.2015, 15:40',
},
}, {
'url': 'tvp:22670268',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'http://www.tvp.pl/sess/tvplayer.php?object_id=%s' % video_id, video_id)
        error_message = get_element_by_attribute('class', 'msg error', webpage)
        if error_message:
            raise ExtractorError('%s said: %s' % (
                self.IE_NAME, clean_html(error_message)), expected=True)
title = self._search_regex(
r'name\s*:\s*([\'"])Title\1\s*,\s*value\s*:\s*\1(?P<title>.+?)\1',
webpage, 'title', group='title')
series_title = self._search_regex(
r'name\s*:\s*([\'"])SeriesTitle\1\s*,\s*value\s*:\s*\1(?P<series>.+?)\1',
webpage, 'series', group='series', default=None)
if series_title:
title = '%s, %s' % (series_title, title)
thumbnail = self._search_regex(
r"poster\s*:\s*'([^']+)'", webpage, 'thumbnail', default=None)
video_url = self._search_regex(
r'0:{src:([\'"])(?P<url>.*?)\1', webpage,
'formats', group='url', default=None)
if not video_url or 'material_niedostepny.mp4' in video_url:
video_url = self._download_json(
'http://www.tvp.pl/pub/stat/videofileinfo?video_id=%s' % video_id,
video_id)['video_url']
formats = []
video_url_base = self._search_regex(
r'(https?://.+?/video)(?:\.(?:ism|f4m|m3u8)|-\d+\.mp4)',
video_url, 'video base url', default=None)
if video_url_base:
# TODO: <Group> found instead of <AdaptationSet> in MPD manifest.
# It's not mentioned in MPEG-DASH standard. Figure that out.
# formats.extend(self._extract_mpd_formats(
# video_url_base + '.ism/video.mpd',
# video_id, mpd_id='dash', fatal=False))
formats.extend(self._extract_ism_formats(
video_url_base + '.ism/Manifest',
video_id, 'mss', fatal=False))
formats.extend(self._extract_f4m_formats(
video_url_base + '.ism/video.f4m',
video_id, f4m_id='hds', fatal=False))
m3u8_formats = self._extract_m3u8_formats(
video_url_base + '.ism/video.m3u8', video_id,
'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)
self._sort_formats(m3u8_formats)
m3u8_formats = list(filter(
lambda f: f.get('vcodec') != 'none', m3u8_formats))
formats.extend(m3u8_formats)
for i, m3u8_format in enumerate(m3u8_formats, 2):
http_url = '%s-%d.mp4' % (video_url_base, i)
if self._is_valid_url(http_url, video_id):
f = m3u8_format.copy()
f.update({
'url': http_url,
'format_id': f['format_id'].replace('hls', 'http'),
'protocol': 'http',
})
formats.append(f)
else:
formats = [{
'format_id': 'direct',
'url': video_url,
'ext': determine_ext(video_url, 'mp4'),
}]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'formats': formats,
}
class TVPSeriesIE(InfoExtractor):
IE_NAME = 'tvp:series'
_VALID_URL = r'https?://vod\.tvp\.pl/(?:[^/]+/){2}(?P<id>[^/]+)/?$'
_TESTS = [{
'url': 'http://vod.tvp.pl/filmy-fabularne/filmy-za-darmo/ogniem-i-mieczem',
'info_dict': {
'title': 'Ogniem i mieczem',
'id': '4278026',
},
'playlist_count': 4,
}, {
'url': 'http://vod.tvp.pl/audycje/podroze/boso-przez-swiat',
'info_dict': {
'title': 'Boso przez świat',
'id': '9329207',
},
'playlist_count': 86,
}]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id, tries=5)
title = self._html_search_regex(
r'(?s) id=[\'"]path[\'"]>(?:.*? / ){2}(.*?)</span>', webpage, 'series')
playlist_id = self._search_regex(r'nodeId:\s*(\d+)', webpage, 'playlist id')
playlist = self._download_webpage(
'http://vod.tvp.pl/vod/seriesAjax?type=series&nodeId=%s&recommend'
'edId=0&sort=&page=0&pageSize=10000' % playlist_id, display_id, tries=5,
note='Downloading playlist')
videos_paths = re.findall(
'(?s)class="shortTitle">.*?href="(/[^"]+)', playlist)
entries = [
self.url_result('http://vod.tvp.pl%s' % v_path, ie=TVPIE.ie_key())
for v_path in videos_paths]
return {
'_type': 'playlist',
'id': playlist_id,
'display_id': display_id,
'title': title,
'entries': entries,
}
| mit | 6,646,843,938,213,271,000 | 36.530973 | 111 | 0.511908 | false |
milkey-mouse/SongRater | flasktasks.py | 1 | 4842 | # -*- coding: cp1252 -*-
#Copyright Milkey Mouse 2015
from HTMLParser import HTMLParser
import simplejson as json
import celery
from celery import Celery
from flask import render_template
from flask.ext.gzip import Gzip
from celery import task
from flask import Flask
import datetime
import urllib2
import string
import math
import sys
import os
celery = Celery("flasktasks")
@celery.task(bind=True)
def rate(self, name):
address = "http://www.songlyrics.com/index.php?section=search&searchW="
address = address + name
response = urllib2.urlopen(address)
html = response.read()
done = False
html = html[html.find('<div class="serpresult">'):]
html = html[html.find('http://'):]
html = html[:html.find('"')]
#try:
response = urllib2.urlopen(html)
html = response.read()
html = html[html.find('<p id="songLyricsDiv"'):]
html = html[html.find('>') + 1:]
html = html[:html.find('</p>')]
html = html.replace("<br>", "\n")
html = html.replace("<br />", "\n")
score = 0.00
last_word = ""
h = HTMLParser()
words = html.replace("\n", " ").split(" ")
wc = len(words)
wd = 0
for word2 in words:
try:
word = h.unescape(word2.lower())
try:
word = word.replace("’", "'")
except:
pass
try:
word = word.translate(string.maketrans("",""), string.punctuation);
except:
pass
wd += 1
if(word != word.strip()):
continue
if(word == ""):
continue
if(os.path.exists("./cache/word/" + word + ".txt")):
text = open("./cache/word/" + word + ".txt", 'r')
score = score + int(float(text.read()))
text.close()
else:
lastscore = score
                if word == 'i':  # 'word' was lowercased above, so match 'i'
#print "STUPID WORD:" + word
score = score + 1
if word == 'baby':
#print "STUPID WORD:" + word
score = score + 1
if word == 'butt':
#print "STUPID WORD:" + word
score = score + 2
if word == 'no':
#print "STUPID WORD:" + word
score = score + 1
if word == 'oh':
#print "STUPID WORD:" + word
score = score + 0.5
if word == 'back':
#print "STUPID WORD:" + word
score = score + 1
if word == 'gone':
#print "STUPID WORD:" + word
score = score + 0.5
if word == 'yeah':
#print "STUPID WORD:" + word
score = score + 1
if word == 'mine':
#print "STUPID WORD:" + word
score = score + 1
if word == 'fat':
#print "STUPID WORD:" + word
score = score + 2
if word == 'love':
#print "STUPID WORD:" + word
score = score + 1.5
if word == 'curves':
#print "STUPID WORD:" + word
score = score + 2.5
if(lastscore != score):
continue
isword = False
try:
response2 = urllib2.urlopen("http://dictionary.reference.com/browse/" + word).read()
if not '<div class="game-scrabble">' in response2:
isword = False
else:
isword = True
except:
isword = True
if isword == False:
#print "NOT A WORD:" + word
score = score + 2
if(word == "nah"):
#print "CHANT REPEATED: " + word
last_word = word
pass
elif(word == "na"):
#print "CHANT REPEATED: " + word
last_word = word
pass
elif(word == last_word):
score = score + 1
#print "WORD REPEATED: " + word
pass
last_word = word
text = open("./cache/word/" + word + ".txt", 'w')
text.write(str(score - lastscore))
text.close()
                self.update_state(state='PROGRESS', meta=str(wd * 100.0 / wc) + "%")
score += 100
#print "CACHED: " + word
except:
pass
score = score / (len(html) - 1)
score = score * 750
return score
| mit | 7,243,354,705,898,804,000 | 33.820144 | 104 | 0.426446 | false |
Arcanemagus/SickRage | sickbeard/traktTrending.py | 6 | 5411 | # coding=utf-8
from __future__ import print_function, unicode_literals
import os
import posixpath
from libtrakt.exceptions import traktException
from libtrakt.trakt import TraktAPI
import sickbeard
from sickbeard import helpers, logger
from sickbeard.indexers.indexer_config import INDEXER_TVDB
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import ex, MultipleShowObjectsException
class traktTrending(object):
def __init__(self):
"""Gets a list of most popular TV series from imdb"""
self.session = helpers.make_session()
def fetch_trending_shows(self, trakt_list, page_url):
"""Get trending show information from Trakt"""
        trending_shows = []
        black_list = False
trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
try:
not_liked_show = ""
if sickbeard.TRAKT_ACCESS_TOKEN != '':
library_shows = trakt_api.traktRequest("sync/collection/shows?extended=full") or []
if sickbeard.TRAKT_BLACKLIST_NAME:
not_liked_show = trakt_api.traktRequest("users/" + sickbeard.TRAKT_USERNAME + "/lists/" + sickbeard.TRAKT_BLACKLIST_NAME + "/items") or []
else:
logger.log("Trakt blacklist name is empty", logger.DEBUG)
if trakt_list not in ["recommended", "newshow", "newseason"]:
limit_show = "?limit=" + str(100 + len(not_liked_show)) + "&"
else:
limit_show = "?"
shows = trakt_api.traktRequest(page_url + limit_show + "extended=full") or []
for show in shows:
try:
if 'show' not in show:
show['show'] = show
if sickbeard.TRAKT_ACCESS_TOKEN != '':
if show['show']['ids']['tvdb'] not in (lshow['show']['ids']['tvdb'] for lshow in library_shows):
if not_liked_show:
if show['show']['ids']['tvdb'] not in (show['show']['ids']['tvdb'] for show in not_liked_show if show['type'] == 'show'):
trending_shows += [show]
else:
trending_shows += [show]
else:
if not_liked_show:
if show['show']['ids']['tvdb'] not in (show['show']['ids']['tvdb'] for show in not_liked_show if show['type'] == 'show'):
trending_shows += [show]
else:
trending_shows += [show]
except MultipleShowObjectsException:
continue
if sickbeard.TRAKT_BLACKLIST_NAME != '':
black_list = True
else:
black_list = False
except traktException as e:
logger.log("Could not connect to Trakt service: {0}".format(ex(e)), logger.WARNING)
for trending_show in trending_shows:
# get indexer id
indexer_id = trending_show['show']['ids']['tvdb']
trending_show['indexer_id'] = indexer_id
# set image path to show (needed to show it on the screen from the cache)
image_name = self.get_image_name(indexer_id)
image_path_relative = ek(posixpath.join, 'images', 'trakt_trending', image_name)
trending_show['image_path'] = image_path_relative
# clear indexer_id if we already have the image in the cache so we don't retrieve it again
image_path = self.get_image_path(image_name)
if ek(os.path.isfile, image_path):
trending_show['indexer_id'] = ''
return trending_shows, black_list
@staticmethod
def get_image_url(indexer_id):
""" Get poster image url from TVDB """
image_url = None
try:
lINDEXER_API_PARMS = sickbeard.indexerApi(INDEXER_TVDB).api_params.copy()
lINDEXER_API_PARMS['banners'] = True
t = sickbeard.indexerApi(INDEXER_TVDB).indexer(**lINDEXER_API_PARMS)
indexer_show_obj = t[int(indexer_id)]
except (sickbeard.indexer_error, IOError) as e:
logger.log("Show id " + indexer_id + " not found on " + sickbeard.indexerApi(INDEXER_TVDB).name +
", not downloading poster: " + ex(e), logger.DEBUG)
return None
if getattr(indexer_show_obj, 'poster', None):
image_url = indexer_show_obj['poster'].replace('posters', '_cache/posters')
return image_url
@staticmethod
def get_image_name(indexer_id):
return str(indexer_id) + ".jpg"
@staticmethod
def get_image_path(image_name):
path = ek(os.path.abspath, ek(os.path.join, sickbeard.CACHE_DIR, 'images', 'trakt_trending'))
if not ek(os.path.exists, path):
ek(os.makedirs, path)
return ek(os.path.join, path, image_name)
def cache_image(self, image_url, image_path):
# Only cache if the file does not exist yet
if not ek(os.path.isfile, image_path):
helpers.download_file(image_url, image_path, session=self.session)
trakt_trending = traktTrending()
| gpl-3.0 | 4,460,796,137,083,000,300 | 39.081481 | 158 | 0.563297 | false |
pieiscool/edited-hearthbreaker | tests/agents/trade_agent_tests.py | 2 | 1499 | import unittest
from hearthbreaker.cards import Wisp, WarGolem, BloodfenRaptor, RiverCrocolisk, AbusiveSergeant, ArgentSquire
from tests.agents.trade.test_helpers import TestHelpers
from tests.agents.trade.test_case_mixin import TestCaseMixin
from hearthbreaker.agents.trade.possible_play import PossiblePlays
class TestTradeAgent(TestCaseMixin, unittest.TestCase):
def test_setup_smoke(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, Wisp(), WarGolem())
self.add_minions(game, 1, BloodfenRaptor())
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
def test_basic_trade(self):
game = TestHelpers().make_game()
self.add_minions(game, 1, Wisp(), WarGolem())
self.add_minions(game, 0, BloodfenRaptor())
self.make_all_active(game)
game.play_single_turn()
self.assert_minions(game.players[1], "War Golem")
self.assert_minions(game.players[0], "Bloodfen Raptor")
def test_buff_target(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, BloodfenRaptor(), RiverCrocolisk())
self.make_all_active(game)
self.add_minions(game, 0, AbusiveSergeant())
game.play_single_turn()
def test_hero_power(self):
cards = [ArgentSquire()]
possible_plays = PossiblePlays(cards, 10, allow_hero_power=True)
self.assertEqual(1, len(possible_plays.plays()))
| mit | 4,464,051,441,537,592,300 | 33.860465 | 109 | 0.677785 | false |
cyberark-bizdev/ansible | test/units/modules/network/netscaler/netscaler_module.py | 57 | 1638 | import sys
from ansible.compat.tests.mock import patch, Mock
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
base_modules_mock = Mock()
nitro_service_mock = Mock()
nitro_exception_mock = Mock()
base_modules_to_mock = {
'nssrc': base_modules_mock,
'nssrc.com': base_modules_mock,
'nssrc.com.citrix': base_modules_mock,
'nssrc.com.citrix.netscaler': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.resource': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.resource.config': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.exception': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.exception.nitro_exception': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.exception.nitro_exception.nitro_exception': nitro_exception_mock,
'nssrc.com.citrix.netscaler.nitro.service': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.service.nitro_service': base_modules_mock,
'nssrc.com.citrix.netscaler.nitro.service.nitro_service.nitro_service': nitro_service_mock,
}
nitro_base_patcher = patch.dict(sys.modules, base_modules_to_mock)
class TestModule(ModuleTestCase):
def failed(self):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def exited(self, changed=False):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
return result
| gpl-3.0 | 2,779,889,809,877,129,700 | 36.227273 | 103 | 0.718559 | false |
CAChemE/stochastic-optimization | ConventionalDistillationColumn/column_algorithm.py | 1 | 2956 |
# -*- coding: utf-8 -*-
from Test_Column_ObjFnc import tac_column
import time
"""
% -------------------------------------------------------------------------
% SIMULATION-BASED OPTIMIZATION OF A SINGLE CONVENTIONAL DISTILLATION
% COLUMN USING THE PARTICLE SWARM OPTIMIZATION ALGORITHM
%--------------------------------------------------------------------------
% Juan Javaloyes Antón. Sep 2016 v.3
%--------------------------------------------------------------------------
% # 04 # Distillation column model
%--------------------------------------------------------------------------
"""
def distColumn_model(x, Problem):
# Independent Variables
RR = x[0] # * RR: Reflux Ratio
BR = x[1] # * BR: Boilup Ratio
NR = x[2] # * NR: Number of active trays in rectifying section
NS = x[3] # * NS: Number of active trays in stripping section
HyObject = Problem.HyObject # Recover Hysys Objects from structure Problem
NT = (NR + NS) + 1 # Total number of active trays
Feed_S = NR + 1 # Feed location
# 01 Change Column Topology and Column specifications (degrees of freedom)
HyObject = Problem.HyObject # Recover Hysys Objects from structure Problem
# Total number of active trays
HyObject.DistColumn.Main_TS.NumberOfTrays = NT
# Feed location
HyObject.DistColumn.Main_TS.SpecifyFeedLocation(HyObject.DistColumn.FeedMainTS, Feed_S)
# Reflux Ratio
HyObject.DistColumn.Column.ColumnFlowsheet.Specifications.Item('Reflux Ratio').GoalValue = RR
# Boilup Ratio
HyObject.DistColumn.Column.ColumnFlowsheet.Specifications.Item('Boilup Ratio').GoalValue = BR
# 02 Run Aspen Hysys model with new topology
HyObject.DistColumn.ColumnFlowsheet.Run() # Run Aspen Hysy model
# time.sleep(0.3)
# 03 Check model convergence
RunStatus = HyObject.HyApp.ActiveDocument.Flowsheet.Operations.Item(0).ColumnFlowsheet.CfsConverged
if RunStatus == 1:
# 04 Compute the Total Annual Cost of the Distillation Column
ColumnCost = tac_column(Problem) # from Test_Column_ObjFnc
# 05 Check purity constraints
        Tol_dist = 0.001  # Molar Fraction Impurities
Bz_Bottoms = 0.001
Comp_frac_Tol_dist = HyObject.MaterialStream.Distillate.ComponentMolarFractionValue[1]
Comp_frac_Bz_Bott = HyObject.MaterialStream.Bottoms.ComponentMolarFractionValue[0]
if Comp_frac_Tol_dist > Tol_dist:
w1 = (Comp_frac_Tol_dist - Tol_dist)*1e5
else:
w1 = 0
if Comp_frac_Bz_Bott > Bz_Bottoms:
w2 = (Comp_frac_Bz_Bott - Bz_Bottoms)*1e5
else:
w2 = 0
# Total Annual Cost + penalty terms
TAC = ColumnCost.TAC + w1 + w2
else: # In case model does not converge
TAC = 1e5
return (TAC)
| bsd-3-clause | -8,751,511,323,158,388,000 | 35.481481 | 104 | 0.572589 | false |
chris-lee-mc/MutInf | cluster_list_to_pml.py | 1 | 20752 | import re, os, sys
from optparse import OptionParser
from numpy import *
import subprocess
from communities import *
class ResCluster :
members = []
selection_text = None #selection text for pymol
number = 0 #cluster number
intra_cluster_variance = 0
extra_cluster_variance = 0
indexlist = []
indexlist_complement = []
def __init__(self,number,firstcluster):
self.number = number
self.members = []
self.firstcluster = firstcluster
def __str__(self):
if(self.selection_text == None):
self.selection_text = ""
firstone = self.firstcluster
for member in self.members:
if firstone !=1:
self.selection_text +=","
firstone = 0
self.selection_text += "(resi "+str(member.number)+" "
if member.tag == "":
self.selection_text += " and (name N,H,CA,C,O) "
if member.tag == "S":
self.selection_text += " and (not name N,H,CA,C,O) "
if member.chain != None:
self.selection_text += " and (chain "+str(member.chain)+" ) "
self.selection_text += " )"
return "sele cluster"+str(self.number+1)+", "+"( "+self.selection_text+")\n color "+str(int(self.number+1))+", cluster"+str(self.number+1)+"\n"
def read_res_matrix(myfilename): #from dihedral_mutent.py
rownames = []
colnames = []
myfile = open(myfilename,'r')
inlines = myfile.readlines()
myfile.close()
res = inlines[0].split()
mymatrix = zeros((int(len(inlines[1:])), int(len(res))),float64)
#print mymatrix.shape
for myname_num in res:
colnames.append(myname_num)
#print colnames
#print len(colnames)
for row_num in range(int(len(inlines[1:]))):
thisline = inlines[row_num + 1]
thislinedata = thisline.split()
thisname = thislinedata[0]
res_num = int(floor(row_num))
thislinenums = map(float, thislinedata[1:]) #does this need to be float64 or another double precision thingy?
#print thislinenums
thislinearray = array(thislinenums,float64)
#print thislinearray.shape
rownames.append(thisname)
for col_num in range(len(colnames)):
#print "name: "+str(thisname)+" chi: "+str(row_chi)+ " row_num: "+str(row_num)+" row_chi: "+str(row_chi)+ " col_num: "+str(col_num)+" col_chi: "+str(col_chi)+"\n"
mymatrix[res_num,col_num] = float64(thislinearray[col_num])
#print rownames
return mymatrix, rownames, colnames
class Res:
name = "ALA"
number = 0
clusternum = -1 #not assigned to a cluster
tag = "" #default to mainchain
chain = None #default to no chain
def __init__(self,name,number,tag,clusternum,chain=None):
self.name = name
self.number = number
self.tag = tag
self.clusternum = clusternum
self.chain = chain
def __str__(self):
return str(self.name)+" "+str(self.number)+" "+str(self.tag)+" "+str(self.clusternum)+" "+str(self.chain)
def matrix_header(self):
if(self.chain == None):
return str(self.name)+str(self.number)+str(self.tag)
else:
return str(self.name)+str(self.number)+str(self.tag)+str(self.chain)
if __name__ == "__main__":
usage="%prog -s mypdb.pdb -f clusters.txt -o clusters.pml"
parser=OptionParser(usage)
### options for k-means and multidimensional scaling
parser.add_option("-s", "--structure", default=None, type="string", help="pdb file")
parser.add_option("-f", "--filename", default=None, type="string", help="space-delimeted text file with three columns: number residue name/number/tag cluster_number")
parser.add_option("-o", "--outfile", default="clusters.pml", type="string", help="filename for pymol session file")
parser.add_option("-b", "--begin", default=0, type=int, help="first residue offset")
parser.add_option("-m", "--matrix", default=None, type="string", help="matrix for intra/extra cluster average variances")
parser.add_option("-r", "--Rfile", default="cluster_kmeans_k12.txt",type="string", help="R commands file to be created")
### options for community analysis
parser.add_option("-c", "--contacts", default=None, type="string", help="matrix for contacts")
parser.add_option("-u", "--cutoff",default=0.5, type="float", help="cutoff value for edges")
parser.add_option("-p", "--prefix",default="mutinf", type="string", help="prefix for output filenames")
parser.add_option("-i", "--iterations",default=None, type="int", help="number of iterations")
parser.add_option("-a", "--hotatoms", default="../hotatoms1g.pdb", type="string", help="filename of CA and sidechain locations to use for community analysis")
parser.add_option("-t", "--mutinf", default=None, type="string", help="mutual information matrix filename")
parser.add_option("-n", "--noR", default="yes", type="string", help="whether to run R or not (yes/no)")
parser.add_option("-x", "--nocontacts", default="no", type="string", help="whether to use contacts for filtering mutinf matrix for community analysis")
parser.add_option("-e", "--equal_weights",default="no",type="string", help="whether or not to use mutinf-based edge weights or equal weights")
parser.add_option("-U", "--user_communities",default=None,type="string", help="filename for communities pickle")
#parser.add_option("-d", "--display_structure",default=None, type="string", help="full structure for pymol display")
## Generate cluster list using multidimensional scaling and kmeans in R ##
(options,args)=parser.parse_args()
print "options"
print options
distfile = options.matrix
distfile = distfile.replace('dist_variance', 'dist')
try:
Rfile = open(options.Rfile,'w')
except:
print "cannot open R command file to be created "+str(options.Rfile)
sys.exit(1)
if options.matrix != None:
variance_matrix, rownames, colnames = read_res_matrix(options.matrix)
R_code1 = """
library(marray)
library(fields)
library(cluster)
"""
Rfile.write(R_code1)
Rfile.write("prec <- read.table(\""+str(options.matrix)+"\")\n")
Rfile.write("precdist <- read.table(\""+str(distfile)+"\")\n")
R_code2 = """
pdf("distance_variance_vs_distance.pdf",height=12,width=12)
plot(c(as.matrix(precdist)),c(as.matrix(prec)),xlab="Distance",ylab="Distance Variance")
dev.off()
pdf("distance_variance_vs_distance_boxplot.pdf",height=12,width=12)
bplot.xy(x=c(as.matrix(precdist)),y=c(as.matrix(prec)+diag(nrow(prec))),N=20,xlab="Distance",ylab="Distance Variance")
dev.off()
nrow(prec)
ncol(prec)
blah = hclust(as.dist(prec),method="ward")
blah2 = cutree(blah, k=16)
blah3=as.table(blah2)
blah4=data.frame(blah3)
blah4
# write.table(blah4, "output_clusters.txt", quote=FALSE, sep='\t')
d <- as.dist(prec)
fit <- cmdscale(d,eig=TRUE,k=(nrow(prec)-1)/2) # k is the number of dim
# fit # view results
newdata = fit$points
# plot solution
x <- fit$points[,1]
y <- fit$points[,2]
plot(x, y, xlab="Coordinate 1", ylab="Coordinate 2",
main="Metric MDS", type="n")
text(x, y, labels = row.names(newdata), cex=.1)
plot(x, y, xlab="Coordinate 1", ylab="Coordinate 2",
main="Metric MDS", type="n")
text(x, y, labels = row.names(newdata), cex=.1)
# Determine number of clusters
wss <- (nrow(prec)-1)*sum(apply(newdata,2,var))
for (i in 2:14) {
ktest <- kmeans(newdata, centers=i, iter=500)
mysum <- 0
mysum2 <- 0
num1 <- 0
num2 <- 0
myratio <- 0
for(j in 1:i) {
if(length(which(ktest$cluster == j)) > 1) {
mysum <- mysum + mean(prec[which(ktest$cluster == j),which(ktest$cluster == j)])
num1 <- length(which(ktest$cluster == j))
mysum2 <- mysum2 + mean(prec[which(ktest$cluster == j),which(ktest$cluster != j)])
myratio <-myratio + mean(prec[which(ktest$cluster == j),which(ktest$cluster == j)]) / mean(prec[which(ktest$cluster == j),which(ktest$cluster != j)])
num2 <- length(which(ktest$cluster != j))
}
}
# wss[i] <- (mysum / num1) / (mysum2 / num2)
# wss[i] <- (mysum / mysum2)
wss[i] <- (myratio / num1)
}
#wss[i] <- sum(kmeans(newdata, centers=i)$withinss)
pdf("sum_intra_cluster_distance_variance_vs_clusters.pdf")
plot(5:14, wss[5:14], type="b", xlab="Number of Clusters",
ylab="Within groups average intra/extra cluster distance variance ratio")
dev.off()
plot(5:14, wss[5:14], type="b", xlab="Number of Clusters",
ylab="Within groups sum of squares")
# K-Means Cluster Analysis
fit <- kmeans(newdata, 12, iter=1000) # selected cluster solution
# get cluster means
aggregate(newdata,by=list(fit$cluster),FUN=mean)
# append cluster assignment
mydata <- data.frame(newdata, fit$cluster)
fit$size
"""
Rfile.write(R_code2)
Rfile.write("write.table(fit$cluster, \""+str(options.filename)+"\", quote=FALSE, sep='\t')")
Rfile.write("\n")
Rfile.write("\n")
Rfile.close()
### Run R to calculate clusters using multidimensional scaling #######
if options.noR == "yes":
print "Running R on "+str(options.Rfile)
p = subprocess.Popen("cat "+str(options.Rfile)+" | R --no-save", shell=True)
os.waitpid(p.pid, 0)
#########################################################################
## Read input file if a matrix is not provided, or cluster file that was just made by R if a matrix was provided ##
(options,args)=parser.parse_args()
try:
fil = open(options.filename,'r')
except:
print "cannot open input file "+str(options.filename)
sys.exit(1)
inlines = fil.readlines()
fil.close()
maxclusternum = -1 #maximum cluster num
reslist = [] #list of residues
#read file
for line in inlines[1:]:
matchline=re.compile(r'([0-9]*)\s*([A-Z][A-Z,0-9][A-Z,0-9])([0-9]+)([S]*)\s+([0-9]+)')
#print line
matches = matchline.match(line)
#print [matches.group(i) for i in range(5)]
if matches.group(2) != None:
name = matches.group(2)
if matches.group(3) != None:
number = int(matches.group(3))
if matches.group(4) == 'S':
tag = matches.group(4)
else:
tag = ''
if matches.group(5) != None:
clusternum = int(matches.group(5))
newres = Res(name,number,tag,clusternum) #offset residue number by option
reslist.append(newres)
if(clusternum > maxclusternum):
maxclusternum = clusternum
#write out clusters to pymol file
print "Writing Pymol Session File "+str(options.outfile)
outfile = open(options.outfile, 'w')
outfile.write("from pymol import cmd\n")
outfile.write("cmd.bg_color('white')\n")
outfile.write("load "+str(options.structure)+", system\n")
## need to fix this if it is to be used ## outfile.write("alter all, resi=str(int(resi -"+str(options.begin)-1")) \n")
clusterlist = []
#loop over clusters
for clusternum in range(1,maxclusternum+1):
thiscluster = []
thiscluster = ResCluster(clusternum, 1)
for residue in reslist:
#print "residue number: "+str(residue.number)+" cluster: "+str(residue.clusternum)
if residue.clusternum == clusternum:
thiscluster.members.append(residue)
#print "cluster: "+str(clusternum)
#for mymember in thiscluster.members:
# print str(mymember)
clusterlist.append(thiscluster)
outfile.write(str(thiscluster)) #output pymol selection line
#if intra-cluster variances over extra-cluster variance averages are to be put on the structure as b-factors
#print rownames
clusternum = 1
for thiscluster in clusterlist:
clusternum = thiscluster.number
thiscluster.indexlist = []
thiscluster.indexlist_complement = range(len(rownames))
for mymember in thiscluster.members:
myheader = mymember.matrix_header()
myindex = rownames.index(myheader) # look for header in rownames
#print "match "+str(myindex)
thiscluster.indexlist.append(myindex)
thiscluster.indexlist_complement.remove(myindex)
#print "cluster "+str(clusternum)+" members: "+str(len(thiscluster.members))
#print "within cluster: "+str(len(thiscluster.indexlist))
#print thiscluster.indexlist
#for myindex in thiscluster.indexlist:
# print rownames[myindex]
#print "outside cluster: "+str(len(thiscluster.indexlist_complement))
#print thiscluster.indexlist_complement
#for myindex in thiscluster.indexlist_complement:
# print rownames[myindex]
within_cluster_num_elements = 0
extra_cluster_num_elements = 0
for i in range(len(thiscluster.indexlist)):
this_column = variance_matrix[i,:]
#print "this column in indexlist"
#print this_column[thiscluster.indexlist]
this_row = variance_matrix[:,i]
#print "this row in indexlist"
#print this_row[thiscluster.indexlist]
thiscluster.intra_cluster_variance += sum(this_column[thiscluster.indexlist])
thiscluster.extra_cluster_variance += sum(this_column[thiscluster.indexlist_complement])
within_cluster_num_elements += len(thiscluster.indexlist)
extra_cluster_num_elements += len(thiscluster.indexlist_complement)
#within_cluster_num_elements += len(thiscluster.indexlist)
#use all values not in this cluster
#for j in range(len(thiscluster.indexlist_complement)):
# for k in range(len(thiscluster.indexlist_complement)):
# thiscluster.extra_cluster_variance += sum(variance_matrix[j,k])
# extra_cluster_num_elements +=1
#extra_cluster_num_elements += len(thiscluster.indexlist_complement)
#print "median var within cluster :"+str(median(this_row[thiscluster.indexlist]))
#print "median var outside cluster :"+str(median(this_row[thiscluster.indexlist_complement]))
thiscluster.intra_cluster_variance /= within_cluster_num_elements
thiscluster.extra_cluster_variance /= extra_cluster_num_elements
print "cluster "+str(clusternum)+", elements: "+str((within_cluster_num_elements))+",outside:"+str((extra_cluster_num_elements))+" within cluster average variance "+str(thiscluster.intra_cluster_variance)+" , outside of cluster average variance "+str(thiscluster.extra_cluster_variance)
outfile.write("alter cluster"+str(clusternum)+", b="+str(thiscluster.intra_cluster_variance / thiscluster.extra_cluster_variance )+"\n")
#outfile.write("color "+str(int(self.number-1))+", cluster"+str(self.number)+"\n"
#finish session file
outfile.write("cmd.show('cartoon' ,'system')\n")
if options.matrix != None:
outfile.write("preset.b_factor_putty('system')"+"\n") #,_self=cmd"+"\n")
outfile.write("cmd.spectrum('b',selection=('all'),quiet=0)"+" \n")
outfile.write("sele b_gt1, b > 1.0 \n")
outfile.write("alter b_gt1, b = 1.0 \n")
outfile.write("color bluewhite, b_gt1"+" \n")
outfile.write("cmd.show('sticks','((byres (system))&(!(n;c,o,h|(n. n&!r. pro))))')" + " \n")
if options.matrix != None:
outfile.write("cmd.spectrum('b',selection=('all'),quiet=0)"+" \n")
outfile.write("color bluewhite, b_gt1"+" \n")
outfile.write("cmd.hide('(all and hydro)')\n")
outfile.write("alter system, resi=str(int(resi)+"+str(options.begin)+")\n")
outfile.write("\n")
outfile.close()
# Run community analysis
#usage="%prog [-t traj1:traj2] [-x xvg_basedir] resfile [simulation numbers to use] # where resfile is in the format <1-based-index> <aa type> <res num>"
#communities_options=OptionParser(usage)
#communities_options.add_option("-s", "--structure", default=None, type="string", help="pdb file")
#communities_options.add_option("-f", "--filename", default=None, type="string", help="mutual information matrix")
#communities_options.add_option("-o", "--outfile", default="communities", type="string", help="prefix for output files")
#communities_options.add_option("-b", "--begin", default=0, type=int, help="first residue offset")
#communities_options.add_option("-m", "--matrix", default=None, type="string", help="matrix for contacts")
#communities_options.add_option("-e", "--edgelist", default="edgelist.dat",type="string", help="output edgelist data file")
#communities_options.add_option("-c", "--cutoff", default=0.5, type="float", help="cutoff value for edges")
#communities_options.add_option("-p", "--prefix",default="mutinf", type="string", help="prefix for output filenames")
#communities_options.add_option("-i", "--iterations",default=None, type="int", help="number of iterations")
#communities_options.add_option("-d", "--display_structure",default=None, type="string", help="full structure for pymol display")
class communities_run_options:
structure = None
filename = None
outfile = "communities"
begin = 0
matrix = None
edgelist = "edgelist.dat"
cutoff = 0.5
prefix = "mutinf"
iterations = None
display_structure = None
nocontacts = "no"
user_communities = None
def __init__(self):
return
## Community analysis on mutinf matrix
if(options.mutinf != "none"):
communities_options = communities_run_options()
communities_options.structure = options.hotatoms
communities_options.filename = options.mutinf
#communities_options.outfile = options.prefix + "_communities_mutinf"
communities_options.begin = options.begin
communities_options.matrix = options.contacts
#communities_options.edgelist = options.prefix +"_communities_mutinf_edgelist.dat"
communities_options.cutoff = options.cutoff
#communities_options.prefix = options.prefix + "_mutinf"
communities_options.iterations = options.iterations
communities_options.display_structure = options.structure
communities_options.nocontacts = options.nocontacts
#communities_options.prefix = options.prefix
communities_options.equal_weights = options.equal_weights
communities_options.user_communities = options.user_communities
# run community analysis from communities.py
mylambda = lambda(x): -.5*log(1-exp(-2*x/3))
run_communities(communities_options, mylambda)
## Community analysis on dist variance matrix
if(options.matrix != "none"):
communities2_options = communities_run_options()
communities2_options.structure = options.hotatoms
communities2_options.filename = options.matrix #difference here
communities2_options.outfile = options.prefix + "_communities_dist_variance"
communities2_options.begin = options.begin
communities2_options.matrix = options.contacts
communities2_options.edgelist = options.prefix + "_communities_dist_variance_dist_variance_edgelist.dat"
communities2_options.cutoff = 0.0
communities2_options.prefix = options.prefix + "_dist_variance"
communities2_options.iterations = options.iterations
communities2_options.display_structure = options.structure
communities2_options.nocontacts = options.nocontacts
# run community analysis from communities.py
mylambda = lambda(x): -.5*log(exp(-2*x/3))
#run_communities(communities2_options, mylambda)
#END
| gpl-3.0 | 7,366,411,161,771,070,000 | 45.321429 | 298 | 0.610254 | false |
thanethomson/statik | tests/integration/test_issue_51.py | 1 | 1717 | # -*- coding: utf-8 -*-
"""Regression test for issue #51."""
import unittest
import os.path
from xml.etree import ElementTree as ET
from statik.generator import generate
class TestStaticPagesFromProjectDynamicContext(unittest.TestCase):
def test_issue(self):
test_path = os.path.dirname(os.path.realpath(__file__))
output_data = generate(
os.path.join(test_path, 'data-non-root-base'),
in_memory=True
)
self.assertIn('index.html', output_data)
self.assertIn('about', output_data)
self.assertIn('index.html', output_data['about'])
self.assertIn('contact', output_data)
self.assertIn('index.html', output_data['contact'])
html = ET.fromstring(output_data['index.html'])
static_page_links = html.findall("body/div[@class='menu']/ul/li/a")
self.assertEqual(2, len(static_page_links))
self.assertEqual('/non/standard/about/', static_page_links[0].attrib['href'])
self.assertEqual('/non/standard/contact/', static_page_links[1].attrib['href'])
self.assert_static_page_compiles(
output_data['about']['index.html'],
"About",
"Here's the About page."
)
self.assert_static_page_compiles(
output_data['contact']['index.html'],
"Contact",
"Here's how to contact us."
)
def assert_static_page_compiles(self, content, expected_title, expected_body):
html = ET.fromstring(content)
title = html.find('head/title')
self.assertEqual(expected_title, title.text.strip())
body = html.find('body/p')
self.assertEqual(expected_body, body.text.strip())
| mit | 3,257,167,400,426,198,000 | 35.531915 | 87 | 0.616773 | false |
ibc/MediaSoup | worker/deps/gyp/test/cflags/gyptest-cflags.py | 38 | 2088 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies the use of the environment during regeneration when the gyp file
changes, specifically via build of an executable with C preprocessor
definition specified by CFLAGS.
In this test, gyp and build both run in same local environment.
"""
import TestGyp
# CPPFLAGS works in ninja but not make; CFLAGS works in both
FORMATS = ('make', 'ninja')
test = TestGyp.TestGyp(formats=FORMATS)
# First set CFLAGS to blank in case the platform doesn't support unsetenv.
with TestGyp.LocalEnv({'CFLAGS': '',
'GYP_CROSSCOMPILE': '1'}):
test.run_gyp('cflags.gyp')
test.build('cflags.gyp')
expect = """FOO not defined\n"""
test.run_built_executable('cflags', stdout=expect)
test.run_built_executable('cflags_host', stdout=expect)
test.sleep()
with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1',
'GYP_CROSSCOMPILE': '1'}):
test.run_gyp('cflags.gyp')
test.build('cflags.gyp')
expect = """FOO defined\n"""
test.run_built_executable('cflags', stdout=expect)
# Environment variable CFLAGS shouldn't influence the flags for the host.
expect = """FOO not defined\n"""
test.run_built_executable('cflags_host', stdout=expect)
test.sleep()
with TestGyp.LocalEnv({'CFLAGS_host': '-DFOO=1',
'GYP_CROSSCOMPILE': '1'}):
test.run_gyp('cflags.gyp')
test.build('cflags.gyp')
# Environment variable CFLAGS_host should influence the flags for the host.
expect = """FOO defined\n"""
test.run_built_executable('cflags_host', stdout=expect)
test.sleep()
with TestGyp.LocalEnv({'CFLAGS': ''}):
test.run_gyp('cflags.gyp')
test.build('cflags.gyp')
expect = """FOO not defined\n"""
test.run_built_executable('cflags', stdout=expect)
test.sleep()
with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1'}):
test.run_gyp('cflags.gyp')
test.build('cflags.gyp')
expect = """FOO defined\n"""
test.run_built_executable('cflags', stdout=expect)
test.pass_test()
| isc | 5,303,447,971,223,246,000 | 26.84 | 75 | 0.692529 | false |
bnwn/ardupilot | mk/VRBRAIN/Tools/genmsg/src/genmsg/srvs.py | 51 | 3095 | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
ROS Service Description Language Spec
Implements http://ros.org/wiki/srv
"""
import os
import sys
from . names import is_legal_resource_name, is_legal_resource_base_name, package_resource_name, resource_name
class SrvSpec(object):
def __init__(self, request, response, text, full_name = '', short_name = '', package = ''):
alt_package, alt_short_name = package_resource_name(full_name)
if not package:
package = alt_package
if not short_name:
short_name = alt_short_name
self.request = request
self.response = response
self.text = text
self.full_name = full_name
self.short_name = short_name
self.package = package
def __eq__(self, other):
if not other or not isinstance(other, SrvSpec):
return False
return self.request == other.request and \
self.response == other.response and \
self.text == other.text and \
self.full_name == other.full_name and \
self.short_name == other.short_name and \
self.package == other.package
def __ne__(self, other):
if not other or not isinstance(other, SrvSpec):
return True
return not self.__eq__(other)
def __repr__(self):
return "SrvSpec[%s, %s]"%(repr(self.request), repr(self.response))
| gpl-3.0 | -4,494,958,927,959,344,000 | 37.679487 | 109 | 0.666882 | false |
gauribhoite/personfinder | env/google_appengine/gen_protorpc.py | 69 | 3127 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convenience wrapper for starting an appengine tool."""
import os
import sys
import time
sys_path = sys.path
try:
sys.path = [os.path.dirname(__file__)] + sys.path
import wrapper_util
finally:
sys.path = sys_path
wrapper_util.reject_old_python_versions((2, 5))
if sys.version_info < (2, 6):
sys.stderr.write(
'WARNING: In an upcoming release the SDK will no longer support Python'
' 2.5. Users should upgrade to Python 2.6 or higher.\n')
time.sleep(1)
def get_dir_path(sibling):
"""Get a path to the directory of this script.
By default, the canonical path (symlinks resolved) will be returned. In some
environments the canonical directory is not sufficient because different
parts of the SDK are referenced by symlinks, including this very module's
file. In this case, the non-canonical path to this file's directory will be
returned (i.e., the directory where the symlink lives, not the directory
where it points).
Args:
sibling: Relative path to a sibling of this module file. Choose a sibling
that is potentially symlinked into the parent directory.
Returns:
A directory name.
Raises:
ValueError: If no proper path could be determined.
"""
return wrapper_util.get_dir_path(__file__, sibling)
DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
_PATHS = wrapper_util.Paths(DIR_PATH)
SCRIPT_DIR = _PATHS.default_script_dir
GOOGLE_SQL_DIR = _PATHS.google_sql_dir
EXTRA_PATHS = _PATHS.v1_extra_paths
API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
def fix_sys_path(extra_extra_paths=()):
"""Fix the sys.path to include our extra paths."""
sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
def run_file(file_path, globals_):
"""Execute the given script with the passed-in globals.
Args:
file_path: the path to the wrapper for the given script. This will usually
be a copy of this file.
globals_: the global bindings to be used while executing the wrapped script.
"""
script_name = os.path.basename(file_path)
sys.path = (_PATHS.script_paths(script_name) +
_PATHS.scrub_path(script_name, sys.path))
if 'google' in sys.modules:
del sys.modules['google']
execfile(_PATHS.script_file(script_name), globals_)
if __name__ == '__main__':
run_file(__file__, globals())
| apache-2.0 | 148,038,806,506,499,600 | 22.511278 | 80 | 0.709946 | false |
pombredanne/teamwork | w2/static/Brython2.0.0-20140209-164925/Lib/tokenize.py | 728 | 24424 | """Tokenization help for Python programs.
tokenize(readline) is a generator that breaks a stream of bytes into
Python tokens. It decodes the bytes according to PEP-0263 for
determining source file encoding.
It accepts a readline-like method which is called repeatedly to get the
next line of input (or b"" for EOF). It generates 5-tuples with these
members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators. Additionally, all token lists start with an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
__author__ = 'Ka-Ping Yee <[email protected]>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
import builtins
import re
import sys
from token import *
from codecs import lookup, BOM_UTF8
import collections
from io import TextIOWrapper
cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII)
import token
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
"NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
ENCODING = N_TOKENS + 2
tok_name[ENCODING] = 'ENCODING'
N_TOKENS += 3
EXACT_TOKEN_TYPES = {
'(': LPAR,
')': RPAR,
'[': LSQB,
']': RSQB,
':': COLON,
',': COMMA,
';': SEMI,
'+': PLUS,
'-': MINUS,
'*': STAR,
'/': SLASH,
'|': VBAR,
'&': AMPER,
'<': LESS,
'>': GREATER,
'=': EQUAL,
'.': DOT,
'%': PERCENT,
'{': LBRACE,
'}': RBRACE,
'==': EQEQUAL,
'!=': NOTEQUAL,
'<=': LESSEQUAL,
'>=': GREATEREQUAL,
'~': TILDE,
'^': CIRCUMFLEX,
'<<': LEFTSHIFT,
'>>': RIGHTSHIFT,
'**': DOUBLESTAR,
'+=': PLUSEQUAL,
'-=': MINEQUAL,
'*=': STAREQUAL,
'/=': SLASHEQUAL,
'%=': PERCENTEQUAL,
'&=': AMPEREQUAL,
'|=': VBAREQUAL,
'^=': CIRCUMFLEXEQUAL,
'<<=': LEFTSHIFTEQUAL,
'>>=': RIGHTSHIFTEQUAL,
'**=': DOUBLESTAREQUAL,
'//': DOUBLESLASH,
'//=': DOUBLESLASHEQUAL,
'@': AT
}
class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
def __repr__(self):
annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
self._replace(type=annotated_type))
@property
def exact_type(self):
if self.type == OP and self.string in EXACT_TOKEN_TYPES:
return EXACT_TOKEN_TYPES[self.string]
else:
return self.type
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
# Note: we use unicode matching for names ("\w") but ascii matching for
# number literals.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'\w+'
Hexnumber = r'0[xX][0-9a-fA-F]+'
Binnumber = r'0[bB][01]+'
Octnumber = r'0[oO][0-7]+'
Decnumber = r'(?:0+|[1-9][0-9]*)'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?[0-9]+'
Pointfloat = group(r'[0-9]+\.[0-9]*', r'\.[0-9]+') + maybe(Exponent)
Expfloat = r'[0-9]+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group(StringPrefix + "'''", StringPrefix + '"""')
# Single-line ' or " string.
String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
r"//=?", r"->",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
def _compile(expr):
return re.compile(expr, re.UNICODE)
endpats = {"'": Single, '"': Double,
"'''": Single3, '"""': Double3,
"r'''": Single3, 'r"""': Double3,
"b'''": Single3, 'b"""': Double3,
"R'''": Single3, 'R"""': Double3,
"B'''": Single3, 'B"""': Double3,
"br'''": Single3, 'br"""': Double3,
"bR'''": Single3, 'bR"""': Double3,
"Br'''": Single3, 'Br"""': Double3,
"BR'''": Single3, 'BR"""': Double3,
"rb'''": Single3, 'rb"""': Double3,
"Rb'''": Single3, 'Rb"""': Double3,
"rB'''": Single3, 'rB"""': Double3,
"RB'''": Single3, 'RB"""': Double3,
"u'''": Single3, 'u"""': Double3,
"R'''": Single3, 'R"""': Double3,
"U'''": Single3, 'U"""': Double3,
'r': None, 'R': None, 'b': None, 'B': None,
'u': None, 'U': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""',
"rb'''", 'rb"""', "rB'''", 'rB"""',
"Rb'''", 'Rb"""', "RB'''", 'RB"""',
"u'''", 'u"""', "U'''", 'U"""',
):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"' ,
"rb'", 'rb"', "rB'", 'rB"',
"Rb'", 'Rb"', "RB'", 'RB"' ,
"u'", 'u"', "U'", 'U"',
):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
self.encoding = None
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
if tok_type == ENCODING:
self.encoding = token
continue
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
prevstring = False
for tok in iterable:
toknum, tokval = tok[:2]
if toknum == ENCODING:
self.encoding = tokval
continue
if toknum in (NAME, NUMBER):
tokval += ' '
# Insert a space between two consecutive strings
if toknum == STRING:
if prevstring:
tokval = ' ' + tokval
prevstring = True
else:
prevstring = False
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
def untokenize(iterable):
"""Transform tokens back into Python source code.
It returns a bytes object, encoded using the ENCODING
token, which is the first token sequence output by tokenize.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output bytes will tokenize back to the input
t1 = [tok[:2] for tok in tokenize(f.readline)]
newcode = untokenize(t1)
readline = BytesIO(newcode).readline
t2 = [tok[:2] for tok in tokenize(readline)]
assert t1 == t2
"""
ut = Untokenizer()
out = ut.untokenize(iterable)
if ut.encoding is not None:
out = out.encode(ut.encoding)
return out
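# Round-trip sketch (full 5-tuples give an exact byte-for-byte result, per
# the invariant documented above):
#
#     from io import BytesIO
#     from tokenize import tokenize, untokenize
#     source = b"a = 1 + 2\n"
#     toks = list(tokenize(BytesIO(source).readline))
#     assert untokenize(toks) == source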
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are present,
but disagree, a SyntaxError will be raised. If the encoding cookie is an
invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be returned.
"""
try:
filename = readline.__self__.name
except AttributeError:
filename = None
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
# Decode as UTF-8. Either the line is an encoding declaration,
# in which case it should be pure ASCII, or it must be UTF-8
# per default encoding.
line_string = line.decode('utf-8')
except UnicodeDecodeError:
msg = "invalid or missing encoding declaration"
if filename is not None:
msg = '{} for {!r}'.format(msg, filename)
raise SyntaxError(msg)
match = cookie_re.match(line_string)
if not match:
return None
encoding = _get_normal_name(match.group(1))
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
if filename is None:
msg = "unknown encoding: " + encoding
else:
msg = "unknown encoding for {!r}: {}".format(filename,
encoding)
raise SyntaxError(msg)
if bom_found:
if encoding != 'utf-8':
# This behaviour mimics the Python interpreter
if filename is None:
msg = 'encoding problem: utf-8'
else:
msg = 'encoding problem for {!r}: utf-8'.format(filename)
raise SyntaxError(msg)
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
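# Sketch: detecting an encoding cookie; the consumed raw lines are returned
# so callers can replay them.
#
#     from io import BytesIO
#     from tokenize import detect_encoding
#     buf = BytesIO(b"# -*- coding: latin-1 -*-\nx = 1\n")
#     enc, consumed = detect_encoding(buf.readline)
#     # enc == 'iso-8859-1'; consumed holds the line(s) already read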
def open(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buffer = builtins.open(filename, 'rb')
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
text.mode = 'r'
return text
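# Sketch: unlike builtins.open, this honors the detected cookie/BOM, e.g.
# tokenize.open('some_module.py').read() ('some_module.py' is a placeholder
# path) decodes using the source file's declared encoding.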
def tokenize(readline):
"""
    The tokenize() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as bytes. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile, 'rb').__next__ # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
The first token sequence will always be an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
# This import is here to avoid problems when the itertools module is not
# built yet and tokenize is imported.
from itertools import chain, repeat
encoding, consumed = detect_encoding(readline)
rl_gen = iter(readline, b"")
empty = repeat(b"")
return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
def _tokenize(readline, encoding):
lnum = parenlev = continued = 0
numchars = '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
if encoding is not None:
if encoding == "utf-8-sig":
# BOM will already have been stripped.
encoding = "utf-8"
yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
while True: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = b''
if encoding is not None:
line = line.decode(encoding)
lnum += 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield TokenInfo(STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield TokenInfo(ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column += 1
elif line[pos] == '\t':
column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos += 1
if pos == max:
break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield TokenInfo(COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield TokenInfo(NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield TokenInfo(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = _compile(PseudoToken).match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
if start == end:
continue
token, initial = line[start:end], line[start]
if (initial in numchars or # ordinary number
(initial == '.' and token != '.' and token != '...')):
yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield TokenInfo(NL if parenlev > 0 else NEWLINE,
token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield TokenInfo(COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = _compile(endpats[token])
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield TokenInfo(STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = _compile(endpats[initial] or
endpats[token[1]] or
endpats[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield TokenInfo(STRING, token, spos, epos, line)
elif initial.isidentifier(): # ordinary name
yield TokenInfo(NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{':
parenlev += 1
elif initial in ')]}':
parenlev -= 1
yield TokenInfo(OP, token, spos, epos, line)
else:
yield TokenInfo(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
# An undocumented, backwards compatible, API for all the places in the standard
# library that expect to be able to use tokenize with strings
def generate_tokens(readline):
return _tokenize(readline, None)
def main():
import argparse
# Helper error handling routines
def perror(message):
print(message, file=sys.stderr)
def error(message, filename=None, location=None):
if location:
args = (filename,) + location + (message,)
perror("%s:%d:%d: error: %s" % args)
elif filename:
perror("%s: error: %s" % (filename, message))
else:
perror("error: %s" % message)
sys.exit(1)
# Parse the arguments and options
parser = argparse.ArgumentParser(prog='python -m tokenize')
parser.add_argument(dest='filename', nargs='?',
metavar='filename.py',
help='the file to tokenize; defaults to stdin')
parser.add_argument('-e', '--exact', dest='exact', action='store_true',
help='display token names using the exact type')
args = parser.parse_args()
try:
# Tokenize the input
if args.filename:
filename = args.filename
with builtins.open(filename, 'rb') as f:
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"
tokens = _tokenize(sys.stdin.readline, None)
# Output the tokenization
for token in tokens:
token_type = token.type
if args.exact:
token_type = token.exact_type
token_range = "%d,%d-%d,%d:" % (token.start + token.end)
print("%-20s%-15s%-15r" %
(token_range, tok_name[token_type], token.string))
except IndentationError as err:
line, column = err.args[1][1:3]
error(err.args[0], filename, (line, column))
except TokenError as err:
line, column = err.args[1]
error(err.args[0], filename, (line, column))
except SyntaxError as err:
error(err, filename)
except IOError as err:
error(err)
except KeyboardInterrupt:
print("interrupted\n")
except Exception as err:
perror("unexpected error: %s" % err)
raise
if __name__ == "__main__":
main()
| gpl-2.0 | -5,714,173,533,827,550,000 | 34.812317 | 83 | 0.514699 | false |
qt911025/pw_module_system | header_operations.py | 2 | 141015 | ###################################################
# header_operations.py
# This file contains opcode declarations
# DO NOT EDIT THIS FILE!
###################################################
#-----------------------------------------------------------------------------
# CONTROL OPERATIONS
#-----------------------------------------------------------------------------
call_script = 1 # (call_script,<script_id>),
try_end = 3 # (try_end),
try_begin = 4 # (try_begin),
else_try = 5 # (else_try),
try_for_range = 6 # (try_for_range,<destination>,<lower_bound>,<upper_bound>),
# works like a for loop from lower-bound up to (upper-bound - 1)
try_for_range_backwards = 7 # same as above but starts from (upper-bound - 1) down-to lower bound.
# (try_for_range_backwards,<destination>,<lower_bound>,<upper_bound>),
try_for_parties = 11 # (try_for_parties,<destination>),
try_for_agents = 12 # (try_for_agents,<destination>),
store_script_param_1 = 21 # (store_script_param_1,<destination>), (within a script) stores the first script parameter.
store_script_param_2 = 22 # (store_script_param_2,<destination>), (within a script) stores the second script parameter.
store_script_param = 23 # (store_script_param,<destination>,<script_param_no>), (within a script) stores <script_param_no>th script parameter.
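# Usage sketch (module-system style; "give_gold_n_times" is a hypothetical
# module_scripts.py entry): reading script parameters and looping with
# try_for_range.
#
#   ("give_gold_n_times",
#     [(store_script_param_1, ":troop_no"),
#      (store_script_param_2, ":times"),
#      (try_for_range, ":unused", 0, ":times"),
#        (troop_add_gold, ":troop_no", 10),
#      (try_end),
#    ]),
#
# called elsewhere as: (call_script, "script_give_gold_n_times", "trp_player", 3),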
#-----------------------------------------------------------------------------
# CONDITION OPERATIONS
#-----------------------------------------------------------------------------
ge = 30 # (ge,<value>,<value>), greater than or equal to
eq = 31 # (eq,<value>,<value>), equal to
gt = 32 # (gt,<value>,<value>), greater than
is_between = 33 # (is_between,<value>,<lower_bound>,<upper_bound>), greater than or equal to lower bound and less than upper bound
entering_town = 36 # (entering_town,<town_id>),
map_free = 37 # (map_free),
encountered_party_is_attacker = 39 # (encountered_party_is_attacker),
conversation_screen_is_active = 42 # (conversation_screen_active), used in mission template triggers only
set_player_troop = 47 # (set_player_troop,<troop_id>),
store_repeat_object = 50 # (store_repeat_object,<destination>), stores the index of a repeated dialog option for repeat_for_factions, etc.
set_result_string = 60 # (set_result_string, <string_id>), sets the result string for game scripts that need one
key_is_down = 70 # (key_is_down, <key_id>), fails if the key is not currently down (key_is_down, <key_id>),
key_clicked = 71 # (key_clicked, <key_id>), fails if the key is not clicked on the specific frame
game_key_is_down = 72 # (game_key_is_down, <game_key_id>), fails if the game key is not currently down
game_key_clicked = 73 # (game_key_clicked, <game_key_id>), fails if the game key is not clicked on the specific frame
mouse_get_position = 75 # (mouse_get_position, <position_no>), x and y values of position are filled
omit_key_once = 77 # (omit_key_once, <key_id>), game omits any bound action for the key once
clear_omitted_keys = 78 # (clear_omitted_keys),
get_global_cloud_amount = 90 # (get_global_cloud_amount, <destination>), returns a value between 0-100
set_global_cloud_amount = 91 # (set_global_cloud_amount, <value>), value is clamped to 0-100
get_global_haze_amount = 92 # (get_global_haze_amount, <destination>), returns a value between 0-100
set_global_haze_amount = 93 # (set_global_haze_amount, <value>), value is clamped to 0-100
hero_can_join = 101 # (hero_can_join, [party_id]),
hero_can_join_as_prisoner = 102 # (hero_can_join_as_prisoner, [party_id]),
party_can_join = 103 # (party_can_join),
party_can_join_as_prisoner = 104 # (party_can_join_as_prisoner),
troops_can_join = 105 # (troops_can_join,<value>),
troops_can_join_as_prisoner = 106 # (troops_can_join_as_prisoner,<value>),
party_can_join_party = 107 # (party_can_join_party, <joiner_party_id>, <host_party_id>,[flip_prisoners]),
party_end_battle = 108 # (party_end_battle,<party_no>),
main_party_has_troop = 110 # (main_party_has_troop,<troop_id>),
party_is_in_town = 130 # (party_is_in_town,<party_id_1>,<party_id_2>),
party_is_in_any_town = 131 # (party_is_in_any_town,<party_id>),
party_is_active = 132 # (party_is_active,<party_id>),
player_has_item = 150 # (player_has_item,<item_id>),
troop_has_item_equipped = 151 # (troop_has_item_equipped,<troop_id>,<item_id>),
troop_is_mounted = 152 # (troop_is_mounted,<troop_id>),
troop_is_guarantee_ranged = 153 # (troop_is_guarantee_ranged, <troop_id>),
troop_is_guarantee_horse = 154 # (troop_is_guarantee_horse, <troop_id>),
check_quest_active = 200 # (check_quest_active,<quest_id>),
check_quest_finished = 201 # (check_quest_finished,<quest_id>),
check_quest_succeeded = 202 # (check_quest_succeeded,<quest_id>),
check_quest_failed = 203 # (check_quest_failed,<quest_id>),
check_quest_concluded = 204 # (check_quest_concluded,<quest_id>),
is_trial_version = 250 # (is_trial_version),
is_edit_mode_enabled = 255 # (is_edit_mode_enabled),
options_get_damage_to_player = 260 # (options_get_damage_to_player, <destination>), 0 = 1/4, 1 = 1/2, 2 = 1/1
options_set_damage_to_player = 261 # (options_set_damage_to_player, <value>), 0 = 1/4, 1 = 1/2, 2 = 1/1
options_get_damage_to_friends = 262 # (options_get_damage_to_friends, <destination>), 0 = 1/2, 1 = 3/4, 2 = 1/1
options_set_damage_to_friends = 263 # (options_set_damage_to_friends, <value>), 0 = 1/2, 1 = 3/4, 2 = 1/1
options_get_combat_ai = 264 # (options_get_combat_ai, <destination>), 0 = good, 1 = average, 2 = poor
options_set_combat_ai = 265 # (options_set_combat_ai, <value>), 0 = good, 1 = average, 2 = poor
options_get_campaign_ai = 266 # (options_get_campaign_ai, <destination>), 0 = good, 1 = average, 2 = poor
options_set_campaign_ai = 267 # (options_set_campaign_ai, <value>), 0 = good, 1 = average, 2 = poor
options_get_combat_speed = 268 # (options_get_combat_speed, <destination>), 0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
options_set_combat_speed = 269 # (options_set_combat_speed, <value>), 0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
profile_get_banner_id = 350 # (profile_get_banner_id, <destination>),
profile_set_banner_id = 351 # (profile_set_banner_id, <value>),
get_achievement_stat = 370 # (get_achievement_stat, <destination>, <achievement_id>, <stat_index>),
set_achievement_stat = 371 # (set_achievement_stat, <achievement_id>, <stat_index>, <value>),
unlock_achievement = 372 # (unlock_achievement, <achievement_id>),
send_message_to_url = 380 # (send_message_to_url, <string_id>, <encode_url>), result will be returned to script_game_receive_url_response
multiplayer_send_message_to_server = 388 # (multiplayer_send_message_to_server, <message_type>),
multiplayer_send_int_to_server = 389 # (multiplayer_send_int_to_server, <message_type>, <value>),
multiplayer_send_2_int_to_server = 390 # (multiplayer_send_2_int_to_server, <message_type>, <value>, <value>),
multiplayer_send_3_int_to_server = 391 # (multiplayer_send_3_int_to_server, <message_type>, <value>, <value>, <value>),
multiplayer_send_4_int_to_server = 392 # (multiplayer_send_4_int_to_server, <message_type>, <value>, <value>, <value>, <value>),
multiplayer_send_string_to_server = 393 # (multiplayer_send_string_to_server, <message_type>, <string_id>),
multiplayer_send_message_to_player = 394 # (multiplayer_send_message_to_player, <player_id>, <message_type>),
multiplayer_send_int_to_player = 395 # (multiplayer_send_int_to_player, <player_id>, <message_type>, <value>),
multiplayer_send_2_int_to_player = 396 # (multiplayer_send_2_int_to_player, <player_id>, <message_type>, <value>, <value>),
multiplayer_send_3_int_to_player = 397 # (multiplayer_send_3_int_to_player, <player_id>, <message_type>, <value>, <value>, <value>),
multiplayer_send_4_int_to_player = 398 # (multiplayer_send_4_int_to_player, <player_id>, <message_type>, <value>, <value>, <value>, <value>),
multiplayer_send_string_to_player = 399 # (multiplayer_send_string_to_player, <player_id>, <message_type>, <string_id>),
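# Usage sketch: a server pushing a custom event to a single client.
# "multiplayer_event_show_score" is a hypothetical message id from
# module_constants.py; the client reads it in script_game_receive_network_message.
#
#   (multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_score,
#    ":score", ":round_no"),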
get_max_players = 400 # (get_max_players, <destination>),
player_is_active = 401 # (player_is_active, <player_id>),
player_get_team_no = 402 # (player_get_team_no, <destination>, <player_id>),
player_set_team_no = 403 # (player_set_team_no, <player_id>, <team_no>),
player_get_troop_id = 404 # (player_get_troop_id, <destination>, <player_id>),
player_set_troop_id = 405 # (player_set_troop_id, <player_id>, <troop_id>),
player_get_agent_id = 406 # (player_get_agent_id, <destination>, <player_id>),
player_get_gold = 407 # (player_get_gold, <destination>, <player_id>),
player_set_gold = 408 # (player_set_gold, <player_id>, <value>, <max_value>), set max_value to 0 if no limit is wanted
player_spawn_new_agent = 409 # (player_spawn_new_agent, <player_id>, <entry_point>),
player_add_spawn_item = 410 # (player_add_spawn_item, <player_id>, <item_slot_no>, <item_id>),
multiplayer_get_my_team = 411 # (multiplayer_get_my_team, <destination>),
multiplayer_get_my_troop = 412 # (multiplayer_get_my_troop, <destination>),
multiplayer_set_my_troop = 413 # (multiplayer_set_my_troop, <troop_id>),
multiplayer_get_my_gold = 414 # (multiplayer_get_my_gold, <destination>),
multiplayer_get_my_player = 415 # (multiplayer_get_my_player, <destination>),
multiplayer_clear_scene = 416 # (multiplayer_clear_scene),
multiplayer_is_server = 417 # (multiplayer_is_server),
multiplayer_is_dedicated_server = 418 # (multiplayer_is_dedicated_server),
game_in_multiplayer_mode = 419 # (game_in_multiplayer_mode),
multiplayer_make_everyone_enemy = 420 # (multiplayer_make_everyone_enemy),
player_control_agent = 421 # (player_control_agent, <player_id>, <agent_id>),
player_get_item_id = 422 # (player_get_item_id, <destination>, <player_id>, <item_slot_no>) only for server
player_get_banner_id = 423 # (player_get_banner_id, <destination>, <player_id>),
game_get_reduce_campaign_ai = 424 # (game_get_reduce_campaign_ai, <destination>), deprecated, use options_get_campaign_ai instead
multiplayer_find_spawn_point = 425 # (multiplayer_find_spawn_point, <destination>, <team_no>, <examine_all_spawn_points>, <is_horseman>),
set_spawn_effector_scene_prop_kind = 426 # (set_spawn_effector_scene_prop_kind, <team_no>, <scene_prop_kind_no>),
set_spawn_effector_scene_prop_id = 427 # (set_spawn_effector_scene_prop_id, <scene_prop_id>),
player_set_is_admin = 429 # (player_set_is_admin, <player_id>, <value>), value is 0 or 1
player_is_admin = 430 # (player_is_admin, <player_id>),
player_get_score = 431 # (player_get_score, <destination>, <player_id>),
player_set_score = 432 # (player_set_score,<player_id>, <value>),
player_get_kill_count = 433 # (player_get_kill_count, <destination>, <player_id>),
player_set_kill_count = 434 # (player_set_kill_count,<player_id>, <value>),
player_get_death_count = 435 # (player_get_death_count, <destination>, <player_id>),
player_set_death_count = 436 # (player_set_death_count, <player_id>, <value>),
player_get_ping = 437 # (player_get_ping, <destination>, <player_id>),
player_is_busy_with_menus = 438 # (player_is_busy_with_menus, <player_id>),
player_get_is_muted = 439 # (player_get_is_muted, <destination>, <player_id>),
player_set_is_muted = 440 # (player_set_is_muted, <player_id>, <value>, [mute_for_everyone]),
# mute_for_everyone optional parameter should be set to 1 if player is muted for everyone (this works only on server).
player_get_unique_id = 441 # (player_get_unique_id, <destination>, <player_id>), can only bew used on server side
player_get_gender = 442 # (player_get_gender, <destination>, <player_id>),
team_get_bot_kill_count = 450 # (team_get_bot_kill_count, <destination>, <team_id>),
team_set_bot_kill_count = 451 # (team_set_bot_kill_count, <team_id>, <value>),
team_get_bot_death_count = 452 # (team_get_bot_death_count, <destination>, <team_id>),
team_set_bot_death_count = 453 # (team_set_bot_death_count, <team_id>, <value>),
team_get_kill_count = 454 # (team_get_kill_count, <destination>, <team_id>),
team_get_score = 455 # (team_get_score, <destination>, <team_id>),
team_set_score = 456 # (team_set_score, <team_id>, <value>),
team_set_faction = 457 # (team_set_faction, <team_id>, <faction_id>),
team_get_faction = 458 # (team_get_faction, <destination>, <team_id>),
player_save_picked_up_items_for_next_spawn = 459 # (player_save_picked_up_items_for_next_spawn, <player_id>),
player_get_value_of_original_items = 460 # (player_get_value_of_original_items, <player_id>), default troop items will be counted as zero (except horse)
player_item_slot_is_picked_up = 461 # (player_item_slot_is_picked_up, <player_id>, <item_slot_no>),
# item slots are overriden when player picks up an item and stays alive until the next round
kick_player = 465 # (kick_player, <player_id>),
ban_player = 466 # (ban_player, <player_id>, <value>, <player_id>), set value = 1 to ban temporarily; for a permanent ban, the second player_id must be assigned as an admin
save_ban_info_of_player = 467 # (save_ban_info_of_player, <player_id>),
ban_player_using_saved_ban_info = 468 # (ban_player_using_saved_ban_info),
start_multiplayer_mission = 470 # (start_multiplayer_mission, <mission_template_id>, <scene_id>, <started_manually>),
server_add_message_to_log = 473 # (server_add_message_to_log, <string_id>),
server_get_renaming_server_allowed = 475 # (server_get_renaming_server_allowed, <destination>), 0-1
server_get_changing_game_type_allowed = 476 # (server_get_changing_game_type_allowed, <destination>), 0-1
server_get_combat_speed = 478 # (server_get_combat_speed, <destination>), 0-2
server_set_combat_speed = 479 # (server_set_combat_speed, <value>), 0-2
server_get_friendly_fire = 480 # (server_get_friendly_fire, <destination>),
server_set_friendly_fire = 481 # (server_set_friendly_fire, <value>), 0 = off, 1 = on
server_get_control_block_dir = 482 # (server_get_control_block_dir, <destination>),
server_set_control_block_dir = 483 # (server_set_control_block_dir, <value>), 0 = automatic, 1 = by mouse movement
server_set_password = 484 # (server_set_password, <string_id>),
server_get_add_to_game_servers_list = 485 # (server_get_add_to_game_servers_list, <destination>),
server_set_add_to_game_servers_list = 486 # (server_set_add_to_game_servers_list, <value>),
server_get_ghost_mode = 487 # (server_get_ghost_mode, <destination>),
server_set_ghost_mode = 488 # (server_set_ghost_mode, <value>),
server_set_name = 489 # (server_set_name, <string_id>),
server_get_max_num_players = 490 # (server_get_max_num_players, <destination>),
server_set_max_num_players = 491 # (server_set_max_num_players, <value>),
server_set_welcome_message = 492 # (server_set_welcome_message, <string_id>),
server_get_melee_friendly_fire = 493 # (server_get_melee_friendly_fire, <destination>),
server_set_melee_friendly_fire = 494 # (server_set_melee_friendly_fire, <value>), 0 = off, 1 = on
server_get_friendly_fire_damage_self_ratio = 495 # (server_get_friendly_fire_damage_self_ratio, <destination>),
server_set_friendly_fire_damage_self_ratio = 496 # (server_set_friendly_fire_damage_self_ratio, <value>), 0-100
server_get_friendly_fire_damage_friend_ratio = 497 # (server_get_friendly_fire_damage_friend_ratio, <destination>),
server_set_friendly_fire_damage_friend_ratio = 498 # (server_set_friendly_fire_damage_friend_ratio, <value>), 0-100
server_get_anti_cheat = 499 # (server_get_anti_cheat, <destination>),
server_set_anti_cheat = 477 # (server_set_anti_cheat, <value>), 0 = off, 1 = on
troop_set_slot = 500 # (troop_set_slot,<troop_id>,<slot_no>,<value>),
party_set_slot = 501 # (party_set_slot,<party_id>,<slot_no>,<value>),
faction_set_slot = 502 # (faction_set_slot,<faction_id>,<slot_no>,<value>),
scene_set_slot = 503 # (scene_set_slot,<scene_id>,<slot_no>,<value>),
party_template_set_slot = 504 # (party_template_set_slot,<party_template_id>,<slot_no>,<value>),
agent_set_slot = 505 # (agent_set_slot,<agent_id>,<slot_no>,<value>),
quest_set_slot = 506 # (quest_set_slot,<quest_id>,<slot_no>,<value>),
item_set_slot = 507 # (item_set_slot,<item_id>,<slot_no>,<value>),
player_set_slot = 508 # (player_set_slot,<player_id>,<slot_no>,<value>),
team_set_slot = 509 # (team_set_slot,<team_id>,<slot_no>,<value>),
scene_prop_set_slot = 510 # (scene_prop_set_slot,<scene_prop_instance_id>,<slot_no>,<value>),
troop_get_slot = 520 # (troop_get_slot,<destination>,<troop_id>,<slot_no>),
party_get_slot = 521 # (party_get_slot,<destination>,<party_id>,<slot_no>),
faction_get_slot = 522 # (faction_get_slot,<destination>,<faction_id>,<slot_no>),
scene_get_slot = 523 # (scene_get_slot,<destination>,<scene_id>,<slot_no>),
party_template_get_slot = 524 # (party_template_get_slot,<destination>,<party_template_id>,<slot_no>),
agent_get_slot = 525 # (agent_get_slot,<destination>,<agent_id>,<slot_no>),
quest_get_slot = 526 # (quest_get_slot,<destination>,<quest_id>,<slot_no>),
item_get_slot = 527 # (item_get_slot,<destination>,<item_id>,<slot_no>),
player_get_slot = 528 # (player_get_slot,<destination>,<player_id>,<slot_no>),
team_get_slot = 529 # (team_get_slot,<destination>,<team_id>,<slot_no>),
scene_prop_get_slot = 530 # (scene_prop_get_slot,<destination>,<scene_prop_instance_id>,<slot_no>),
troop_slot_eq = 540 # (troop_slot_eq,<troop_id>,<slot_no>,<value>),
party_slot_eq = 541 # (party_slot_eq,<party_id>,<slot_no>,<value>),
faction_slot_eq = 542 # (faction_slot_eq,<faction_id>,<slot_no>,<value>),
scene_slot_eq = 543 # (scene_slot_eq,<scene_id>,<slot_no>,<value>),
party_template_slot_eq = 544 # (party_template_slot_eq,<party_template_id>,<slot_no>,<value>),
agent_slot_eq = 545 # (agent_slot_eq,<agent_id>,<slot_no>,<value>),
quest_slot_eq = 546 # (quest_slot_eq,<quest_id>,<slot_no>,<value>),
item_slot_eq = 547 # (item_slot_eq,<item_id>,<slot_no>,<value>),
player_slot_eq = 548 # (player_slot_eq,<player_id>,<slot_no>,<value>),
team_slot_eq = 549 # (team_slot_eq,<team_id>,<slot_no>,<value>),
scene_prop_slot_eq = 550 # (scene_prop_slot_eq,<scene_prop_instance_id>,<slot_no>,<value>),
troop_slot_ge = 560 # (troop_slot_ge,<troop_id>,<slot_no>,<value>),
party_slot_ge = 561 # (party_slot_ge,<party_id>,<slot_no>,<value>),
faction_slot_ge = 562 # (faction_slot_ge,<faction_id>,<slot_no>,<value>),
scene_slot_ge = 563 # (scene_slot_ge,<scene_id>,<slot_no>,<value>),
party_template_slot_ge = 564 # (party_template_slot_ge,<party_template_id>,<slot_no>,<value>),
agent_slot_ge = 565 # (agent_slot_ge,<agent_id>,<slot_no>,<value>),
quest_slot_ge = 566 # (quest_slot_ge,<quest_id>,<slot_no>,<value>),
item_slot_ge = 567 # (item_slot_ge,<item_id>,<slot_no>,<value>),
player_slot_ge = 568 # (player_slot_ge,<player_id>,<slot_no>,<value>),
team_slot_ge = 569 # (team_slot_ge,<team_id>,<slot_no>,<value>),
scene_prop_slot_ge = 570 # (scene_prop_slot_ge,<scene_prop_instance_id>,<slot_no>,<value>),
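# Usage sketch: slots are free-form per-object integer storage.
# "slot_troop_paid" is a hypothetical slot index from module_constants.py.
#
#   (troop_set_slot, "trp_player", slot_troop_paid, 1),
#   (troop_get_slot, ":paid", "trp_player", slot_troop_paid),
#   (troop_slot_eq, "trp_player", slot_troop_paid, 1),  # condition form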
play_sound_at_position = 599 # (play_sound_at_position, <sound_id>, <position_no>, [options]),
play_sound = 600 # (play_sound,<sound_id>,[options]),
play_track = 601 # (play_track,<track_id>, [options]), 0 = default, 1 = fade out current track, 2 = stop current track
play_cue_track = 602 # (play_cue_track,<track_id>), starts immediately
music_set_situation = 603 # (music_set_situation, <situation_type>),
music_set_culture = 604 # (music_set_culture, <culture_type>),
stop_all_sounds = 609 # (stop_all_sounds, [options]), 0 = stop only looping sounds, 1 = stop all sounds
store_last_sound_channel = 615 # (store_last_sound_channel, <destination>),
stop_sound_channel = 616 # (stop_sound_channel, <sound_channel_no>),
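# Usage sketch (mission trigger): "snd_metal_hit" is a hypothetical sound id
# from module_sounds.py; pos1 must already hold a valid scene position.
#
#   (play_sound_at_position, "snd_metal_hit", pos1),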
copy_position = 700 # (copy_position,<position_no_1>,<position_no_2>), copies position_no_2 to position_no_1
init_position = 701 # (init_position,<position_no>),
get_trigger_object_position = 702 # (get_trigger_object_position,<position_no>),
get_angle_between_positions = 705 # (get_angle_between_positions, <destination_fixed_point>, <position_no_1>, <position_no_2>),
position_has_line_of_sight_to_position = 707 # (position_has_line_of_sight_to_position, <position_no_1>, <position_no_2>),
get_distance_between_positions = 710 # (get_distance_between_positions,<destination>,<position_no_1>,<position_no_2>), gets distance in centimeters.
get_distance_between_positions_in_meters = 711 # (get_distance_between_positions_in_meters,<destination>,<position_no_1>,<position_no_2>), gets distance in meters.
get_sq_distance_between_positions = 712 # (get_sq_distance_between_positions,<destination>,<position_no_1>,<position_no_2>), gets squared distance in centimeters
get_sq_distance_between_positions_in_meters = 713 # (get_sq_distance_between_positions_in_meters,<destination>,<position_no_1>,<position_no_2>), gets squared distance in meters
position_is_behind_position = 714 # (position_is_behind_position,<position_no_1>,<position_no_2>),
get_sq_distance_between_position_heights = 715 # (get_sq_distance_between_position_heights,<destination>,<position_no_1>,<position_no_2>), gets squared distance in centimeters
position_transform_position_to_parent = 716 # (position_transform_position_to_parent,<dest_position_no>,<position_no>,<position_no_to_be_transformed>),
position_transform_position_to_local = 717 # (position_transform_position_to_local, <dest_position_no>,<position_no>,<position_no_to_be_transformed>),
position_copy_rotation = 718 # (position_copy_rotation,<position_no_1>,<position_no_2>), copies rotation of position_no_2 to position_no_1
position_copy_origin = 719 # (position_copy_origin,<position_no_1>,<position_no_2>), copies origin of position_no_2 to position_no_1
position_move_x = 720 # (position_move_x,<position_no>,<movement>,[value]), movement is in cms, [0 = local; 1=global]
position_move_y = 721 # (position_move_y,<position_no>,<movement>,[value]),
position_move_z = 722 # (position_move_z,<position_no>,<movement>,[value]),
position_rotate_x = 723 # (position_rotate_x,<position_no>,<angle>),
position_rotate_y = 724 # (position_rotate_y,<position_no>,<angle>),
position_rotate_z = 725 # (position_rotate_z,<position_no>,<angle>,[use_global_z_axis]), set use_global_z_axis as 1 if needed, otherwise you don't have to give that.
position_get_x = 726 # (position_get_x,<destination_fixed_point>,<position_no>), x position in meters * fixed point multiplier is returned
position_get_y = 727 # (position_get_y,<destination_fixed_point>,<position_no>), y position in meters * fixed point multiplier is returned
position_get_z = 728 # (position_get_z,<destination_fixed_point>,<position_no>), z position in meters * fixed point multiplier is returned
position_set_x = 729 # (position_set_x,<position_no>,<value_fixed_point>), meters / fixed point multiplier is set
position_set_y = 730 # (position_set_y,<position_no>,<value_fixed_point>), meters / fixed point multiplier is set
position_set_z = 731 # (position_set_z,<position_no>,<value_fixed_point>), meters / fixed point multiplier is set
position_get_scale_x = 735 # (position_get_scale_x,<destination_fixed_point>,<position_no>), x scale in meters * fixed point multiplier is returned
position_get_scale_y = 736 # (position_get_scale_y,<destination_fixed_point>,<position_no>), y scale in meters * fixed point multiplier is returned
position_get_scale_z = 737 # (position_get_scale_z,<destination_fixed_point>,<position_no>), z scale in meters * fixed point multiplier is returned
position_rotate_x_floating = 738 # (position_rotate_x_floating,<position_no>,<angle>), angle in degree * fixed point multiplier
position_rotate_y_floating = 739 # (position_rotate_y_floating,<position_no>,<angle>), angle in degree * fixed point multiplier
position_get_rotation_around_z = 740 # (position_get_rotation_around_z,<destination>,<position_no>), rotation around z axis is returned as angle
position_normalize_origin = 741 # (position_normalize_origin,<destination_fixed_point>,<position_no>),
# destination = convert_to_fixed_point(length(position.origin))
# position.origin *= 1/length(position.origin) so it normalizes the origin vector
position_get_rotation_around_x = 742 # (position_get_rotation_around_x, <destination>, <position_no>), rotation around x axis is returned as angle
position_get_rotation_around_y = 743 # (position_get_rotation_around_y, <destination>, <position_no>), rotation around y axis is returned as angle
position_set_scale_x = 744 # (position_set_scale_x, <position_no>, <value_fixed_point>), x scale in meters / fixed point multiplier is set
position_set_scale_y = 745 # (position_set_scale_y, <position_no>, <value_fixed_point>), y scale in meters / fixed point multiplier is set
position_set_scale_z = 746 # (position_set_scale_z, <position_no>, <value_fixed_point>), z scale in meters / fixed point multiplier is set
position_get_screen_projection = 750 # (position_get_screen_projection, <position_no_1>, <position_no_2>), returns screen projection of position_no_2 to position_no_1
position_set_z_to_ground_level = 791 # (position_set_z_to_ground_level, <position_no>), only works during a mission
position_get_distance_to_terrain = 792 # (position_get_distance_to_terrain, <destination>, <position_no>), only works during a mission
position_get_distance_to_ground_level = 793 # (position_get_distance_to_ground_level, <destination>, <position_no>), only works during a mission
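# Usage sketch: building a scene position using only the operations above.
# pos1 is scratch position register 1 (from header_common.py).
#
#   (init_position, pos1),
#   (position_move_x, pos1, 250),            # 250 cm along the local x axis
#   (position_rotate_z, pos1, 90),           # rotate 90 degrees around z
#   (position_set_z_to_ground_level, pos1),  # snap to terrain (mission only)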
start_presentation = 900 # (start_presentation, <presentation_id>),
start_background_presentation = 901 # (start_background_presentation, <presentation_id>), can only be used in game menus
presentation_set_duration = 902 # (presentation_set_duration, <duration-in-1/100-seconds>), there must be an active presentation
is_presentation_active = 903 # (is_presentation_active, <presentation_id),
create_text_overlay = 910 # (create_text_overlay, <destination>, <string_id>), returns overlay id
create_mesh_overlay = 911 # (create_mesh_overlay, <destination>, <mesh_id>), returns overlay id
create_button_overlay = 912 # (create_button_overlay, <destination>, <string_id>), returns overlay id
create_image_button_overlay = 913 # (create_image_button_overlay, <destination>, <mesh_id>, <mesh_id>), returns overlay id. second mesh is the pressed button mesh
create_slider_overlay = 914 # (create_slider_overlay, <destination>, <min_value>, <max_value>), returns overlay id
create_progress_overlay = 915 # (create_progress_overlay, <destination>, <min_value>, <max_value>), returns overlay id
create_combo_button_overlay = 916 # (create_combo_button_overlay, <destination>), returns overlay id
create_text_box_overlay = 917 # (create_text_box_overlay, <destination>), returns overlay id
create_check_box_overlay = 918 # (create_check_box_overlay, <destination>), returns overlay id
create_simple_text_box_overlay = 919 # (create_simple_text_box_overlay, <destination>), returns overlay id
overlay_set_text = 920 # (overlay_set_text, <overlay_id>, <string_id>),
overlay_set_color = 921 # (overlay_set_color, <overlay_id>, <color>), color in RGB format like 0xRRGGBB (put hexadecimal values for RR GG and BB parts)
overlay_set_alpha = 922 # (overlay_set_alpha, <overlay_id>, <alpha>), alpha in A format like 0xAA (put hexadecimal values for AA part)
overlay_set_hilight_color = 923 # (overlay_set_hilight_color, <overlay_id>, <color>), color in RGB format like 0xRRGGBB (put hexadecimal values for RR GG and BB parts)
overlay_set_hilight_alpha = 924 # (overlay_set_hilight_alpha, <overlay_id>, <alpha>), alpha in A format like 0xAA (put hexadecimal values for AA part)
overlay_set_size = 925 # (overlay_set_size, <overlay_id>, <position_no>), position's x and y values are used
overlay_set_position = 926 # (overlay_set_position, <overlay_id>, <position_no>), position's x and y values are used
overlay_set_val = 927 # (overlay_set_val, <overlay_id>, <value>), can be used for sliders, combo buttons and check boxes
overlay_set_boundaries = 928 # (overlay_set_boundaries, <overlay_id>, <min_value>, <max_value>),
overlay_set_area_size = 929 # (overlay_set_area_size, <overlay_id>, <position_no>), position's x and y values are used
overlay_set_mesh_rotation = 930 # (overlay_set_mesh_rotation, <overlay_id>, <position_no>), position's rotation values are used for rotations around x, y and z axis
overlay_add_item = 931 # (overlay_add_item, <overlay_id>, <string_id>), adds an item to the combo box
overlay_animate_to_color = 932 # (overlay_animate_to_color, <overlay_id>, <duration-in-1/1000-seconds>, <color>), alpha value will not be used
overlay_animate_to_alpha = 933 # (overlay_animate_to_alpha, <overlay_id>, <duration-in-1/1000-seconds>, <color>), only alpha value will be used
overlay_animate_to_highlight_color = 934 # (overlay_animate_to_highlight_color, <overlay_id>, <duration-in-1/1000-seconds>, <color>), alpha value will not be used
overlay_animate_to_highlight_alpha = 935 # (overlay_animate_to_highlight_alpha, <overlay_id>, <duration-in-1/1000-seconds>, <color>), only alpha value will be used
overlay_animate_to_size = 936 # (overlay_animate_to_size, <overlay_id>, <duration-in-1/1000-seconds>, <position_no>), position's x and y values are used as the new size
overlay_animate_to_position = 937 # (overlay_animate_to_position, <overlay_id>, <duration-in-1/1000-seconds>, <position_no>), position's x and y values are used as the new position
create_image_button_overlay_with_tableau_material = 938 # (create_image_button_overlay_with_tableau_material, <destination>, <mesh_id>, <tableau_material_id>, <value>),
# returns overlay id, value is passed to tableau_material, when mesh_id is -1, a default mesh is generated automatically
create_mesh_overlay_with_tableau_material = 939 # (create_mesh_overlay_with_tableau_material, <destination>, <mesh_id>, <tableau_material_id>, <value>),
# returns overlay id, value is passed to tableau_material, when mesh_id is -1, a default mesh is generated automatically
create_game_button_overlay = 940 # (create_game_button_overlay, <destination>, <string_id>), returns overlay id
create_in_game_button_overlay = 941 # (create_in_game_button_overlay, <destination>, <string_id>), returns overlay id
create_number_box_overlay = 942 # (create_number_box_overlay, <destination>, <min_value>, <max_value>), returns overlay id
create_listbox_overlay = 943 # (create_listbox_overlay, <destination>), returns overlay id
create_mesh_overlay_with_item_id = 944 # (create_mesh_overlay_with_item_id, <destination>, <item_id>), returns overlay id.
set_container_overlay = 945 # (set_container_overlay, <overlay_id>), sets the container overlay that new overlays will attach to. give -1 to reset
overlay_get_position = 946 # (overlay_get_position, <destination>, <overlay_id>),
overlay_set_display = 947 # (overlay_set_display, <overlay_id>, <value>), shows/hides overlay (1 = show, 0 = hide)
create_combo_label_overlay = 948 # (create_combo_label_overlay, <destination>), returns overlay id
overlay_obtain_focus = 949 # (overlay_obtain_focus, <overlay_id>), works for textboxes only
overlay_set_tooltip = 950 # (overlay_set_tooltip, <overlay_id>, <string_id>),
overlay_set_container_overlay = 951 # (overlay_set_container_overlay, <overlay_id>, <container_overlay_id>) -1 to reset
overlay_set_additional_render_height = 952 # (overlay_set_additional_render_height, <overlay_id>, <height_adder>),
show_object_details_overlay = 960 # (show_object_details_overlay, <value>), 0 = hide, 1 = show
show_item_details = 970 # (show_item_details, <item_id>, <position_no>, <show_default_text_or_not>)
# show_default_text_or_not should be 1 for showing "default" for default item costs
close_item_details = 971 # (close_item_details)
show_item_details_with_modifier = 972 # (show_item_details_with_modifier, <item_id>, <item_modifier>, <position_no>, <show_default_text_or_not>)
# show_default_text_or_not should be 1 for showing "default" for default item costs
context_menu_add_item = 980 # (context_menu_add_item, <string_id>, <value>), must be called only inside script_game_right_mouse_menu_get_buttons
get_average_game_difficulty = 990 # (get_average_game_difficulty, <destination>),
get_level_boundary = 991 # (get_level_boundary, <destination>, <level_no>),
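# Usage sketch (inside a presentation's ti_on_presentation_load trigger):
# create and place a text label. The fixed point multiplier of 1000 is the
# usual presentation convention (set_fixed_point_multiplier is defined
# further down in this file).
#
#   (set_fixed_point_multiplier, 1000),
#   (create_text_overlay, reg0, "@Hello"),
#   (position_set_x, pos1, 400),
#   (position_set_y, pos1, 300),
#   (overlay_set_position, reg0, pos1),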
#-----------------------------------------------------------------------------
# MISSION CONDITIONS
#-----------------------------------------------------------------------------
all_enemies_defeated = 1003 # (all_enemies_defeated),
race_completed_by_player = 1004 # (race_completed_by_player),
num_active_teams_le = 1005 # (num_active_teams_le,<value>),
main_hero_fallen = 1006 # (main_hero_fallen),
#-----------------------------------------------------------------------------
# NEGATIONS
#-----------------------------------------------------------------------------
neg = 0x80000000 # (neg|<operation>),
this_or_next = 0x40000000 # (this_or_next|<operation>),
lt = neg | ge # (lt,<value>,<value>), less than
neq = neg | eq # (neq,<value>,<value>), not equal to
le = neg | gt # (le,<value>,<value>), less than or equal to
#-----------------------------------------------------------------------------
# CONSEQUENCE OPERATIONS
#-----------------------------------------------------------------------------
finish_party_battle_mode = 1019 # (finish_party_battle_mode),
set_party_battle_mode = 1020 # (set_party_battle_mode),
set_camera_follow_party = 1021 # (set_camera_follow_party,<party_id>), works on map only.
start_map_conversation = 1025 # (start_map_conversation,<troop_id>),
rest_for_hours = 1030 # (rest_for_hours,<rest_period>,[time_speed],[remain_attackable]),
rest_for_hours_interactive = 1031 # (rest_for_hours_interactive,<rest_period>,[time_speed],[remain_attackable]),
add_xp_to_troop = 1062 # (add_xp_to_troop,<value>,[troop_id]),
add_gold_as_xp = 1063 # (add_gold_as_xp,<value>,[troop_id]),
add_xp_as_reward = 1064 # (add_xp_as_reward,<value>),
add_gold_to_party = 1070 # (add_gold_to_party,<value>,<party_id>), party_id should be different from 0
set_party_creation_random_limits = 1080 # (set_party_creation_random_limits, <min_value>, <max_value>), (values should be between 0, 100)
troop_set_note_available = 1095 # (troop_set_note_available, <troop_id>, <value>), 1 = available, 0 = not available
faction_set_note_available = 1096 # (faction_set_note_available, <faction_id>, <value>), 1 = available, 0 = not available
party_set_note_available = 1097 # (party_set_note_available, <party_id>, <value>), 1 = available, 0 = not available
quest_set_note_available = 1098 # (quest_set_note_available, <quest_id>, <value>), 1 = available, 0 = not available
spawn_around_party = 1100 # (spawn_around_party,<party_id>,<party_template_id>), id of spawned party is put into reg0
set_spawn_radius = 1103 # (set_spawn_radius,<value>),
display_debug_message = 1104 # (display_debug_message,<string_id>,[hex_colour_code]),
# displays message only in debug mode, but writes to rgl_log.txt in both release and debug modes when edit mode is enabled
display_log_message = 1105 # (display_log_message,<string_id>,[hex_colour_code]),
display_message = 1106 # (display_message,<string_id>,[hex_colour_code]),
set_show_messages = 1107 # (set_show_messages,<value>), 0 disables window messages 1 re-enables them.
add_troop_note_tableau_mesh = 1108 # (add_troop_note_tableau_mesh,<troop_id>,<tableau_material_id>),
add_faction_note_tableau_mesh = 1109 # (add_faction_note_tableau_mesh,<faction_id>,<tableau_material_id>),
add_party_note_tableau_mesh = 1110 # (add_party_note_tableau_mesh,<party_id>,<tableau_material_id>),
add_quest_note_tableau_mesh = 1111 # (add_quest_note_tableau_mesh,<quest_id>,<tableau_material_id>),
add_info_page_note_tableau_mesh = 1090 # (add_info_page_note_tableau_mesh,<info_page_id>,<tableau_material_id>),
add_troop_note_from_dialog = 1114 # (add_troop_note_from_dialog,<troop_id>,<note_slot_no>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_faction_note_from_dialog = 1115 # (add_faction_note_from_dialog,<faction_id>,<note_slot_no>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_party_note_from_dialog = 1116 # (add_party_note_from_dialog,<party_id>,<note_slot_no>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_quest_note_from_dialog = 1112 # (add_quest_note_from_dialog,<quest_id>,<note_slot_no>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_info_page_note_from_dialog = 1091 # (add_info_page_note_from_dialog,<info_page_id>,<note_slot_no>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_troop_note_from_sreg = 1117 # (add_troop_note_from_sreg,<troop_id>,<note_slot_no>,<string_id>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_faction_note_from_sreg = 1118 # (add_faction_note_from_sreg,<faction_id>,<note_slot_no>,<string_id>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_party_note_from_sreg = 1119 # (add_party_note_from_sreg,<party_id>,<note_slot_no>,<string_id>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_quest_note_from_sreg = 1113 # (add_quest_note_from_sreg,<quest_id>,<note_slot_no>,<string_id>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
add_info_page_note_from_sreg = 1092 # (add_info_page_note_from_sreg,<info_page_id>,<note_slot_no>,<string_id>,<value>), there is a maximum of 8 slots; value = 1 shows a message when the note is added
tutorial_box = 1120 # (tutorial_box,<string_id>,<string_id>), deprecated, use dialog_box instead.
dialog_box = 1120 # (dialog_box,<text_string_id>,<title_string_id>),
question_box = 1121 # (question_box,<string_id>, [<yes_string_id>], [<no_string_id>]),
tutorial_message = 1122 # (tutorial_message,<string_id>, <color>, <auto_close_time>), set string_id = -1 for hiding the message
tutorial_message_set_position = 1123 # (tutorial_message_set_position, <position_x>, <position_y>),
tutorial_message_set_size = 1124 # (tutorial_message_set_size, <size_x>, <size_y>),
tutorial_message_set_center_justify = 1125 # (tutorial_message_set_center_justify, <val>), set a non-zero value for center justify, 0 for no center justify
tutorial_message_set_background = 1126 # (tutorial_message_set_background, <value>), 1 = on, 0 = off, default is off
set_tooltip_text = 1130 # (set_tooltip_text, <string_id>),
reset_price_rates = 1170 # (reset_price_rates),
set_price_rate_for_item = 1171 # (set_price_rate_for_item,<item_id>,<value_percentage>),
set_price_rate_for_item_type = 1172 # (set_price_rate_for_item_type,<item_type_id>,<value_percentage>),
party_join = 1201 # (party_join),
party_join_as_prisoner = 1202 # (party_join_as_prisoner),
troop_join = 1203 # (troop_join,<troop_id>),
troop_join_as_prisoner = 1204 # (troop_join_as_prisoner,<troop_id>),
remove_member_from_party = 1210 # (remove_member_from_party,<troop_id>,[party_id]),
remove_regular_prisoners = 1211 # (remove_regular_prisoners,<party_id>),
remove_troops_from_companions = 1215 # (remove_troops_from_companions,<troop_id>,<value>),
remove_troops_from_prisoners = 1216 # (remove_troops_from_prisoners,<troop_id>,<value>),
heal_party = 1225 # (heal_party,<party_id>),
disable_party = 1230 # (disable_party,<party_id>),
enable_party = 1231 # (enable_party,<party_id>),
remove_party = 1232 # (remove_party,<party_id>),
add_companion_party = 1233 # (add_companion_party,<troop_id_hero>),
add_troop_to_site = 1250 # (add_troop_to_site,<troop_id>,<scene_id>,<entry_no>),
remove_troop_from_site = 1251 # (remove_troop_from_site,<troop_id>,<scene_id>),
modify_visitors_at_site = 1261 # (modify_visitors_at_site,<scene_id>),
reset_visitors = 1262 # (reset_visitors),
set_visitor = 1263 # (set_visitor,<entry_no>,<troop_id>,[<dna>]),
set_visitors = 1264 # (set_visitors,<entry_no>,<troop_id>,<number_of_troops>),
add_visitors_to_current_scene = 1265 # (add_visitors_to_current_scene,<entry_no>,<troop_id>,<number_of_troops>,<team_no>,<group_no>),
# team no and group no are used in multiplayer mode only. default team in entry is used in single player mode
scene_set_day_time = 1266 # (scene_set_day_time, <value>), value in hours (0-23), must be called within ti_before_mission_start triggers
set_relation = 1270 # (set_relation,<faction_id>,<faction_id>,<value>),
faction_set_name = 1275 # (faction_set_name, <faction_id>, <string_id>),
faction_set_color = 1276 # (faction_set_color, <faction_id>, <value>),
faction_get_color = 1277 # (faction_get_color, <destination>, <faction_id>),
start_quest = 1280 # (start_quest,<quest_id>),
complete_quest = 1281 # (complete_quest,<quest_id>),
succeed_quest = 1282 # (succeed_quest,<quest_id>), also concludes the quest
fail_quest = 1283 # (fail_quest,<quest_id>), also concludes the quest
cancel_quest = 1284 # (cancel_quest,<quest_id>),
set_quest_progression = 1285 # (set_quest_progression,<quest_id>,<value>),
conclude_quest = 1286 # (conclude_quest,<quest_id>),
setup_quest_text = 1290 # (setup_quest_text,<quest_id>),
setup_quest_giver = 1291 # (setup_quest_giver,<quest_id>, <string_id>),
start_encounter = 1300 # (start_encounter,<party_id>),
leave_encounter = 1301 # (leave_encounter),
encounter_attack = 1302 # (encounter_attack),
select_enemy = 1303 # (select_enemy,<value>),
set_passage_menu = 1304 # (set_passage_menu,<value>),
auto_set_meta_mission_at_end_commited = 1305 # (auto_set_meta_mission_at_end_commited),
end_current_battle = 1307 # (end_current_battle),
set_mercenary_source_party = 1320 # (set_mercenary_source_party,<party_id>), selects party from which to buy mercenaries
set_merchandise_modifier_quality = 1490 # (set_merchandise_modifier_quality,<value>),quality rate in percentage (average quality = 100)
set_merchandise_max_value = 1491 # (set_merchandise_max_value,<value>),
reset_item_probabilities = 1492 # (reset_item_probabilities),
set_item_probability_in_merchandise = 1493 # (set_item_probability_in_merchandise,<itm_id>,<value>),
troop_set_name = 1501 # (troop_set_name, <troop_id>, <string_no>),
troop_set_plural_name = 1502 # (troop_set_plural_name, <troop_id>, <string_no>),
troop_set_face_key_from_current_profile = 1503 # (troop_set_face_key_from_current_profile, <troop_id>),
troop_set_type = 1505 # (troop_set_type,<troop_id>,<gender>),
troop_get_type = 1506 # (troop_get_type,<destination>,<troop_id>),
troop_is_hero = 1507 # (troop_is_hero,<troop_id>),
troop_is_wounded = 1508 # (troop_is_wounded,<troop_id>), only for heroes!
troop_set_auto_equip = 1509 # (troop_set_auto_equip,<troop_id>,<value>), disables or enables auto-equipping
troop_ensure_inventory_space = 1510 # (troop_ensure_inventory_space,<troop_id>,<value>),
troop_sort_inventory = 1511 # (troop_sort_inventory,<troop_id>),
troop_add_merchandise = 1512 # (troop_add_merchandise,<troop_id>,<item_type_id>,<value>),
troop_add_merchandise_with_faction = 1513 # (troop_add_merchandise_with_faction,<troop_id>,<faction_id>,<item_type_id>,<value>), faction_id is given to check if troop is eligible to produce that item
troop_get_xp = 1515 # (troop_get_xp, <destination>, <troop_id>),
troop_get_class = 1516 # (troop_get_class, <destination>, <troop_id>),
troop_set_class = 1517 # (troop_set_class, <troop_id>, <value>),
troop_raise_attribute = 1520 # (troop_raise_attribute,<troop_id>,<attribute_id>,<value>),
troop_raise_skill = 1521 # (troop_raise_skill,<troop_id>,<skill_id>,<value>),
troop_raise_proficiency = 1522 # (troop_raise_proficiency,<troop_id>,<proficiency_no>,<value>),
troop_raise_proficiency_linear = 1523 # (troop_raise_proficiency_linear,<troop_id>,<proficiency_no>,<value>),
# raises weapon proficiencies linearly without being limited by weapon master skill
troop_add_proficiency_points = 1525 # (troop_add_proficiency_points,<troop_id>,<value>),
troop_add_gold = 1528 # (troop_add_gold,<troop_id>,<value>),
troop_remove_gold = 1529 # (troop_remove_gold,<troop_id>,<value>),
troop_add_item = 1530 # (troop_add_item,<troop_id>,<item_id>,[modifier]),
troop_remove_item = 1531 # (troop_remove_item,<troop_id>,<item_id>),
troop_clear_inventory = 1532 # (troop_clear_inventory,<troop_id>),
troop_equip_items = 1533 # (troop_equip_items,<troop_id>), equips the items in the inventory automatically
troop_inventory_slot_set_item_amount = 1534 # (troop_inventory_slot_set_item_amount,<troop_id>,<inventory_slot_no>,<value>),
troop_inventory_slot_get_item_amount = 1537 # (troop_inventory_slot_get_item_amount,<destination>,<troop_id>,<inventory_slot_no>),
troop_inventory_slot_get_item_max_amount = 1538 # (troop_inventory_slot_get_item_max_amount,<destination>,<troop_id>,<inventory_slot_no>),
troop_add_items = 1535 # (troop_add_items,<troop_id>,<item_id>,<number>),
troop_remove_items = 1536 # (troop_remove_items,<troop_id>,<item_id>,<number>), puts cost of items to reg0
troop_loot_troop = 1539 # (troop_loot_troop,<target_troop>,<source_troop_id>,<probability>),
troop_get_inventory_capacity = 1540 # (troop_get_inventory_capacity,<destination>,<troop_id>),
troop_get_inventory_slot = 1541 # (troop_get_inventory_slot,<destination>,<troop_id>,<inventory_slot_no>),
troop_get_inventory_slot_modifier = 1542 # (troop_get_inventory_slot_modifier,<destination>,<troop_id>,<inventory_slot_no>),
troop_set_inventory_slot = 1543 # (troop_set_inventory_slot,<troop_id>,<inventory_slot_no>,<value>),
troop_set_inventory_slot_modifier = 1544 # (troop_set_inventory_slot_modifier,<troop_id>,<inventory_slot_no>,<value>),
troop_set_faction = 1550 # (troop_set_faction,<troop_id>,<faction_id>),
troop_set_age = 1555 # (troop_set_age, <troop_id>, <age_slider_pos>), enter a value between 0..100
troop_set_health = 1560 # (troop_set_health,<troop_id>,<relative health (0-100)>),
troop_get_upgrade_troop = 1561 # (troop_get_upgrade_troop,<destination>,<troop_id>,<upgrade_path>), upgrade_path can be: 0 = get first node, 1 = get second node (returns -1 if not available)
item_get_type = 1570 # (item_get_type, <destination>, <item_id>), returned values are listed at header_items.py (values starting with itp_type_)
party_get_num_companions = 1601 # (party_get_num_companions,<destination>,<party_id>),
party_get_num_prisoners = 1602 # (party_get_num_prisoners,<destination>,<party_id>),
party_set_flags = 1603 # (party_set_flags, <party_id>, <flag>, <clear_or_set>), sets flags like pf_default_behavior. see header_parties.py for flags.
party_set_marshall = 1604 # (party_set_marshall, <party_id>, <value>)
party_set_extra_text = 1605 # (party_set_extra_text,<party_id>, <string>)
party_set_aggressiveness = 1606 # (party_set_aggressiveness, <party_id>, <number>),
party_set_courage = 1607 # (party_set_courage, <party_id>, <number>),
party_get_current_terrain = 1608 # (party_get_current_terrain,<destination>,<party_id>),
party_get_template_id = 1609 # (party_get_template_id,<destination>,<party_id>),
party_add_members = 1610 # (party_add_members,<party_id>,<troop_id>,<number>), returns number added in reg0
party_add_prisoners = 1611 # (party_add_prisoners,<party_id>,<troop_id>,<number>), returns number added in reg0
party_add_leader = 1612 # (party_add_leader,<party_id>,<troop_id>,[<number>]),
party_force_add_members = 1613 # (party_force_add_members,<party_id>,<troop_id>,<number>),
party_force_add_prisoners = 1614 # (party_force_add_prisoners,<party_id>,<troop_id>,<number>),
party_remove_members = 1615 # (party_remove_members,<party_id>,<troop_id>,<number>), stores number removed to reg0
party_remove_prisoners = 1616 # (party_remove_prisoners,<party_id>,<troop_id>,<number>), stores number removed to reg0
party_clear = 1617 # (party_clear,<party_id>),
party_wound_members = 1618 # (party_wound_members,<party_id>,<troop_id>,<number>),
party_remove_members_wounded_first = 1619 # (party_remove_members_wounded_first,<party_id>,<troop_id>,<number>), stores number removed to reg0
party_set_faction = 1620 # (party_set_faction,<party_id>,<faction_id>),
party_relocate_near_party = 1623 # (party_relocate_near_party,<party_id>,<target_party_id>,<value_spawn_radius>),
party_get_position = 1625 # (party_get_position,<position_no>,<party_id>),
party_set_position = 1626 # (party_set_position,<party_id>,<position_no>),
map_get_random_position_around_position = 1627 # (map_get_random_position_around_position,<dest_position_no>,<source_position_no>,<radius>),
map_get_land_position_around_position = 1628 # (map_get_land_position_around_position,<dest_position_no>,<source_position_no>,<radius>),
map_get_water_position_around_position = 1629 # (map_get_water_position_around_position,<dest_position_no>,<source_position_no>,<radius>),
party_count_members_of_type = 1630 # (party_count_members_of_type,<destination>,<party_id>,<troop_id>),
party_count_companions_of_type = 1631 # (party_count_companions_of_type,<destination>,<party_id>,<troop_id>),
party_count_prisoners_of_type = 1632 # (party_count_prisoners_of_type,<destination>,<party_id>,<troop_id>),
party_get_free_companions_capacity = 1633 # (party_get_free_companions_capacity,<destination>,<party_id>),
party_get_free_prisoners_capacity = 1634 # (party_get_free_prisoners_capacity,<destination>,<party_id>),
party_get_ai_initiative = 1638 # (party_get_ai_initiative,<destination>,<party_id>), result is between 0-100
party_set_ai_initiative = 1639 # (party_set_ai_initiative,<party_id>,<value>), value is between 0-100
party_set_ai_behavior = 1640 # (party_set_ai_behavior,<party_id>,<ai_bhvr>),
party_set_ai_object = 1641 # (party_set_ai_object,<party_id>,<party_id>),
party_set_ai_target_position = 1642 # (party_set_ai_target_position,<party_id>,<position_no>),
party_set_ai_patrol_radius = 1643 # (party_set_ai_patrol_radius,<party_id>,<radius_in_km>),
party_ignore_player = 1644 # (party_ignore_player, <party_id>,<duration_in_hours>), don't pursue player party for this duration
party_set_bandit_attraction = 1645 # (party_set_bandit_attraction, <party_id>,<attraction>), set how attractive a target the party is for bandits (0..100)
party_get_helpfulness = 1646 # (party_get_helpfulness,<destination>,<party_id>),
party_set_helpfulness = 1647 # (party_set_helpfulness, <party_id>, <number>), tendency to help friendly parties under attack. (0-10000, 100 default.)
party_get_num_companion_stacks = 1650 # (party_get_num_companion_stacks,<destination>,<party_id>),
party_get_num_prisoner_stacks = 1651 # (party_get_num_prisoner_stacks, <destination>,<party_id>),
party_stack_get_troop_id = 1652 # (party_stack_get_troop_id, <destination>,<party_id>,<stack_no>),
party_stack_get_size = 1653 # (party_stack_get_size, <destination>,<party_id>,<stack_no>),
party_stack_get_num_wounded = 1654 # (party_stack_get_num_wounded, <destination>,<party_id>,<stack_no>),
party_stack_get_troop_dna = 1655 # (party_stack_get_troop_dna, <destination>,<party_id>,<stack_no>),
party_prisoner_stack_get_troop_id = 1656 # (party_prisoner_stack_get_troop_id,<destination>,<party_id>,<stack_no>),
party_prisoner_stack_get_size = 1657 # (party_prisoner_stack_get_size, <destination>,<party_id>,<stack_no>),
party_prisoner_stack_get_troop_dna = 1658 # (party_prisoner_stack_get_troop_dna, <destination>,<party_id>,<stack_no>),
party_attach_to_party = 1660 # (party_attach_to_party, <party_id>, <party_id to attach to>),
party_detach = 1661 # (party_detach, <party_id>),
party_collect_attachments_to_party = 1662 # (party_collect_attachments_to_party, <party_id>, <destination party_id>),
party_quick_attach_to_current_battle = 1663 # (party_quick_attach_to_current_battle, <party_id>, <side (0:players side, 1:enemy side)>),
party_get_cur_town = 1665 # (party_get_cur_town, <destination>, <party_id>),
party_leave_cur_battle = 1666 # (party_leave_cur_battle, <party_id>),
party_set_next_battle_simulation_time = 1667 # (party_set_next_battle_simulation_time,<party_id>,<next_simulation_time_in_hours>),
party_set_name = 1669 # (party_set_name, <party_id>, <string_no>),
party_add_xp_to_stack = 1670 # (party_add_xp_to_stack, <party_id>, <stack_no>, <xp_amount>),
party_get_morale = 1671 # (party_get_morale, <destination>,<party_id>),
party_set_morale = 1672 # (party_set_morale, <party_id>, <value>), value is clamped to range [0...100].
party_upgrade_with_xp = 1673 # (party_upgrade_with_xp, <party_id>, <xp_amount>, <upgrade_path>), upgrade_path can be: 0 = random, 1 = first, 2 = second
party_add_xp = 1674 # (party_add_xp, <party_id>, <xp_amount>),
party_add_template = 1675 # (party_add_template, <party_id>, <party_template_id>, [reverse_prisoner_status]),
party_set_icon = 1676 # (party_set_icon, <party_id>, <map_icon_id>),
party_set_banner_icon = 1677 # (party_set_banner_icon, <party_id>, <map_icon_id>),
party_add_particle_system = 1678 # (party_add_particle_system, <party_id>, <particle_system_id>),
party_clear_particle_systems = 1679 # (party_clear_particle_systems, <party_id>),
party_get_battle_opponent = 1680 # (party_get_battle_opponent, <destination>, <party_id>)
party_get_icon = 1681 # (party_get_icon, <destination>, <party_id>),
party_set_extra_icon = 1682 # (party_set_extra_icon, <party_id>, <map_icon_id>, <up_down_distance_fixed_point>, <up_down_frequency_fixed_point>, <rotate_frequency_fixed_point>, <fade_in_out_frequency_fixed_point>), frequencies are in number of revolutions per second
party_get_skill_level = 1685 # (party_get_skill_level, <destination>, <party_id>, <skill_no>),
get_battle_advantage = 1690 # (get_battle_advantage, <destination>),
set_battle_advantage = 1691 # (set_battle_advantage, <value>),
party_get_attached_to = 1694 # (party_get_attached_to, <destination>, <party_id>),
party_get_num_attached_parties = 1695 # (party_get_num_attached_parties, <destination>, <party_id>),
party_get_attached_party_with_rank = 1696 # (party_get_attached_party_with_rank, <destination>, <party_id>, <attached_party_no>),
inflict_casualties_to_party_group = 1697 # (inflict_casualties_to_party_group, <parent_party_id>, <attack_rounds>, <party_id_to_add_casualties_to>),
distribute_party_among_party_group = 1698 # (distribute_party_among_party_group, <party_to_be_distributed>, <group_root_party>),
get_player_agent_no = 1700 # (get_player_agent_no,<destination>),
get_player_agent_kill_count = 1701 # (get_player_agent_kill_count,<destination>,[get_wounded]), set second value to non-zero to get wounded count. returns lifetime kill counts
agent_is_alive = 1702 # (agent_is_alive,<agent_id>),
agent_is_wounded = 1703 # (agent_is_wounded,<agent_id>),
agent_is_human = 1704 # (agent_is_human,<agent_id>),
get_player_agent_own_troop_kill_count = 1705 # (get_player_agent_own_troop_kill_count,<destination>,[get_wounded]), set second value to non-zero to get wounded count
agent_is_ally = 1706 # (agent_is_ally,<agent_id>),
agent_is_non_player = 1707 # (agent_is_non_player, <agent_id>),
agent_is_defender = 1708 # (agent_is_defender,<agent_id>),
agent_is_routed = 1699 # (agent_is_routed,<agent_id>),
agent_is_in_special_mode = 1693 # (agent_is_in_special_mode,<agent_id>),
agent_get_look_position = 1709 # (agent_get_look_position, <position_no>, <agent_id>),
agent_get_position = 1710 # (agent_get_position,<position_no>,<agent_id>),
agent_set_position = 1711 # (agent_set_position,<agent_id>,<position_no>),
agent_get_speed = 1689 # (agent_get_speed, <position_no>, <agent_id>), will return speed in x and y
agent_is_active = 1712 # (agent_is_active,<agent_id>),
agent_set_look_target_agent = 1713 # (agent_set_look_target_agent, <agent_id>, <agent_id>), second agent_id is the target
agent_get_horse = 1714 # (agent_get_horse,<destination>,<agent_id>),
agent_get_rider = 1715 # (agent_get_rider,<destination>,<agent_id>),
agent_get_party_id = 1716 # (agent_get_party_id,<destination>,<agent_id>),
agent_get_entry_no = 1717 # (agent_get_entry_no,<destination>,<agent_id>),
agent_get_troop_id = 1718 # (agent_get_troop_id,<destination>, <agent_id>),
agent_get_item_id = 1719 # (agent_get_item_id,<destination>, <agent_id>), (works only for horses, returns -1 otherwise)
store_agent_hit_points = 1720 # (store_agent_hit_points,<destination>,<agent_id>,[absolute]),
# set absolute to 1 to retrieve actual hps, otherwise will return relative hp in range [0..100]
agent_set_hit_points = 1721 # (agent_set_hit_points,<agent_id>,<value>,[absolute]),
# set absolute to 1 if value is absolute, otherwise value will be treated as relative number in range [0..100]
agent_deliver_damage_to_agent = 1722 # (agent_deliver_damage_to_agent, <agent_id_deliverer>, <agent_id>, <value>, [item_id]),
# if value <= 0, then damage will be calculated using the weapon item. item_id is the item that the damage is delivered. can be ignored.
agent_get_kill_count = 1723 # (agent_get_kill_count,<destination>,<agent_id>,[get_wounded]), set second value to non-zero to get wounded count
agent_get_player_id = 1724 # (agent_get_player_id,<destination>,<agent_id>),
agent_set_invulnerable_shield = 1725 # (agent_set_invulnerable_shield, <agent_id>),
agent_get_wielded_item = 1726 # (agent_get_wielded_item,<destination>,<agent_id>,<hand_no>),
agent_get_ammo = 1727 # (agent_get_ammo,<destination>,<agent_id>, <value>), value = 1 gets ammo for wielded item, value = 0 gets ammo for all items
agent_get_ammo_for_slot = 1825 # (agent_get_ammo_for_slot, <destination>, <agent_id>, <slot_no>), slot no can be between 0-3 (weapon slots)
agent_refill_ammo = 1728 # (agent_refill_ammo,<agent_id>),
agent_refill_wielded_shield_hit_points = 1692 # (agent_refill_wielded_shield_hit_points, <agent_id>),
agent_has_item_equipped = 1729 # (agent_has_item_equipped,<agent_id>,<item_id>),
agent_set_scripted_destination = 1730 # (agent_set_scripted_destination,<agent_id>,<position_no>,<auto_set_z_to_ground_level>), auto_set_z_to_ground_level can be 0 (false) or 1 (true)
agent_get_scripted_destination = 1731 # (agent_get_scripted_destination,<position_no>,<agent_id>),
agent_force_rethink = 1732 # (agent_force_rethink, <agent_id>),
agent_set_no_death_knock_down_only = 1733 # (agent_set_no_death_knock_down_only, <agent_id>, <value>), 0 for disable, 1 for enable
agent_set_horse_speed_factor = 1734 # (agent_set_horse_speed_factor, <agent_id>, <speed_multiplier-in-1/100>),
agent_clear_scripted_mode = 1735 # (agent_clear_scripted_mode,<agent_id>),
agent_set_speed_limit = 1736 # (agent_set_speed_limit,<agent_id>,<speed_limit(kilometers/hour)>), affects ai only
agent_ai_set_always_attack_in_melee = 1737 # (agent_ai_set_always_attack_in_melee, <agent_id>,<value>), to be used in sieges so that agents don't wait on the ladder.
agent_get_simple_behavior = 1738 # (agent_get_simple_behavior, <destination>, <agent_id>), constants are written in header_mission_templates.py, starting with aisb_
agent_get_combat_state = 1739 # (agent_get_combat_state, <destination>, <agent_id>),
agent_set_animation = 1740 # (agent_set_animation, <agent_id>, <anim_id>, [channel_no]), channel_no default is 0. top body only animations should have channel_no value as 1.
agent_set_stand_animation = 1741 # (agent_set_stand_animation, <agent_id>, <anim_id>),
agent_set_walk_forward_animation = 1742 # (agent_set_walk_forward_animation, <agent_id>, <anim_id>),
agent_set_animation_progress = 1743 # (agent_set_animation_progress, <agent_id>, <value_fixed_point>), value should be between 0-1 (as fixed point)
agent_set_look_target_position = 1744 # (agent_set_look_target_position, <agent_id>, <position_no>),
agent_set_attack_action = 1745 # (agent_set_attack_action, <agent_id>, <value>, <value>), value: -2 = clear any attack action, 0 = thrust, 1 = slashright, 2 = slashleft, 3 = overswing - second value 0 = ready and release, 1 = ready and hold
agent_set_defend_action = 1746 # (agent_set_defend_action, <agent_id>, <value>, <duration-in-1/1000-seconds>), value: -2 = clear any defend action, 0 = defend_down, 1 = defend_right, 2 = defend_left, 3 = defend_up
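# Illustrative sketch (not part of the original header): forcing a scripted
# swing from a mission-template trigger. The register name ":agent" and the
# trigger context are assumptions for the example.
#   (agent_set_attack_action, ":agent", 3, 1),   # 3 = overswing, 1 = ready and hold
#   (agent_set_defend_action, ":agent", -2, 0),  # -2 = clear any defend action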
agent_set_wielded_item = 1747 # (agent_set_wielded_item, <agent_id>, <item_id>),
agent_set_scripted_destination_no_attack = 1748 # (agent_set_scripted_destination_no_attack,<agent_id>,<position_no>,<auto_set_z_to_ground_level>), auto_set_z_to_ground_level can be 0 (false) or 1 (true)
agent_fade_out = 1749 # (agent_fade_out, <agent_id>),
agent_play_sound = 1750 # (agent_play_sound, <agent_id>, <sound_id>),
agent_start_running_away = 1751 # (agent_start_running_away, <agent_id>, [position_no]), if position_no is entered, agent will run away to that location; pos0 will be ignored.
agent_stop_running_away = 1752 # (agent_stop_running_away, <agent_id>),
agent_ai_set_aggressiveness = 1753 # (agent_ai_set_aggressiveness, <agent_id>, <value>), 100 is the default aggressiveness. the higher the value, the less likely the agent is to run back
agent_set_kick_allowed = 1754 # (agent_set_kick_allowed, <agent_id>, <value>), 0 for disable, 1 for allow
remove_agent = 1755 # (remove_agent, <agent_id>),
agent_get_attached_scene_prop = 1756 # (agent_get_attached_scene_prop, <destination>, <agent_id>)
agent_set_attached_scene_prop = 1757 # (agent_set_attached_scene_prop, <agent_id>, <scene_prop_id>)
agent_set_attached_scene_prop_x = 1758 # (agent_set_attached_scene_prop_x, <agent_id>, <value>)
agent_set_attached_scene_prop_y = 1809 # (agent_set_attached_scene_prop_y, <agent_id>, <value>)
agent_set_attached_scene_prop_z = 1759 # (agent_set_attached_scene_prop_z, <agent_id>, <value>)
agent_get_time_elapsed_since_removed = 1760 # (agent_get_time_elapsed_since_removed, <destination>, <agent_id>),
agent_get_number_of_enemies_following = 1761 # (agent_get_number_of_enemies_following, <destination>, <agent_id>),
agent_set_no_dynamics = 1762 # (agent_set_no_dynamics, <agent_id>, <value>), 0 = turn dynamics off, 1 = turn dynamics on (required for cut-scenes)
agent_get_attack_action = 1763 # (agent_get_attack_action, <destination>, <agent_id>), free = 0, readying_attack = 1, releasing_attack = 2, completing_attack_after_hit = 3, attack_parried = 4, reloading = 5, after_release = 6, cancelling_attack = 7
agent_get_defend_action = 1764 # (agent_get_defend_action, <destination>, <agent_id>), free = 0, parrying = 1, blocking = 2
agent_get_group = 1765 # (agent_get_group, <destination>, <agent_id>),
agent_set_group = 1766 # (agent_set_group, <agent_id>, <value>),
agent_get_action_dir = 1767 # (agent_get_action_dir, <destination>, <agent_id>), invalid = -1, down = 0, right = 1, left = 2, up = 3
agent_get_animation = 1768 # (agent_get_animation, <destination>, <agent_id>, <body_part>), 0 = lower body part, 1 = upper body part
agent_is_in_parried_animation = 1769 # (agent_is_in_parried_animation, <agent_id>),
agent_get_team = 1770 # (agent_get_team, <destination>, <agent_id>),
agent_set_team = 1771 # (agent_set_team, <agent_id>, <value>),
agent_get_class = 1772 # (agent_get_class, <destination>, <agent_id>),
agent_get_division = 1773 # (agent_get_division, <destination>, <agent_id>),
agent_unequip_item = 1774 # (agent_unequip_item, <agent_id>, <item_id>, [weapon_slot_no]),
# weapon_slot_no is optional, and can be between 1-4 (used only for weapons, not armor). in either case, item_id has to be set correctly.
class_is_listening_order = 1775 # (class_is_listening_order, <team_no>, <sub_class>),
agent_set_ammo = 1776 # (agent_set_ammo,<agent_id>,<item_id>,<value>), value = a number between 0 and maximum ammo
agent_add_offer_with_timeout = 1777 # (agent_add_offer_with_timeout, <agent_id>, <agent_id>, <duration-in-1/1000-seconds>),
# second agent_id is offerer, 0 value for duration is an infinite offer
agent_check_offer_from_agent = 1778 # (agent_check_offer_from_agent, <agent_id>, <agent_id>), second agent_id is offerer
agent_equip_item = 1779 # (agent_equip_item, <agent_id>, <item_id>, [weapon_slot_no]), for weapons, agent needs to have an empty weapon slot.
# weapon_slot_no is optional, and can be between 1-4 (used only for weapons, not armor).
entry_point_get_position = 1780 # (entry_point_get_position, <position_no>, <entry_no>),
entry_point_set_position = 1781 # (entry_point_set_position, <entry_no>, <position_no>),
entry_point_is_auto_generated = 1782 # (entry_point_is_auto_generated, <entry_no>),
agent_set_division = 1783 # (agent_set_division, <agent_id>, <value>),
team_get_hold_fire_order = 1784 # (team_get_hold_fire_order, <destination>, <team_no>, <sub_class>),
team_get_movement_order = 1785 # (team_get_movement_order, <destination>, <team_no>, <sub_class>),
team_get_riding_order = 1786 # (team_get_riding_order, <destination>, <team_no>, <sub_class>),
team_get_weapon_usage_order = 1787 # (team_get_weapon_usage_order, <destination>, <team_no>, <sub_class>),
teams_are_enemies = 1788 # (teams_are_enemies, <team_no>, <team_no_2>),
team_give_order = 1790 # (team_give_order, <team_no>, <sub_class>, <order_id>),
team_set_order_position = 1791 # (team_set_order_position, <team_no>, <sub_class>, <position_no>),
team_get_leader = 1792 # (team_get_leader, <destination>, <team_no>),
team_set_leader = 1793 # (team_set_leader, <team_no>, <new_leader_agent_id>),
team_get_order_position = 1794 # (team_get_order_position, <position_no>, <team_no>, <sub_class>),
team_set_order_listener = 1795 # (team_set_order_listener, <team_no>, <sub_class>, <merge_with_old_listeners>), clear listeners if sub_class is less than zero
team_set_relation = 1796 # (team_set_relation, <team_no>, <team_no_2>, <value>), -1 for enemy, 1 for friend, 0 for neutral
set_rain = 1797 # (set_rain,<rain-type>,<strength>), (rain_type: 1= rain, 2=snow ; strength: 0 - 100)
set_fog_distance = 1798 # (set_fog_distance, <distance_in_meters>, [fog_color]),
get_scene_boundaries = 1799 # (get_scene_boundaries, <position_min>, <position_max>),
scene_prop_enable_after_time = 1800 # (scene_prop_enable_after_time, <scene_prop_id>, <value>)
scene_prop_has_agent_on_it = 1801 # (scene_prop_has_agent_on_it, <scene_prop_id>, <agent_id>)
agent_clear_relations_with_agents = 1802 # (agent_clear_relations_with_agents, <agent_id>),
agent_add_relation_with_agent = 1803 # (agent_add_relation_with_agent, <agent_id>, <agent_id>, <value>), -1 = enemy, 0 = neutral (no friendly fire at all), 1 = ally
agent_get_item_slot = 1804 # (agent_get_item_slot, <destination>, <agent_id>, <equip_slot>), equip slots are defined in header_items starting with ek_
ai_mesh_face_group_show_hide = 1805 # (ai_mesh_face_group_show_hide, <group_no>, <value>), 1 for enable, 0 for disable
agent_is_alarmed = 1806 # (agent_is_alarmed, <agent_id>),
agent_set_is_alarmed = 1807 # (agent_set_is_alarmed, <agent_id>, <value>), 1 for enable, 0 for disable
agent_stop_sound = 1808 # (agent_stop_sound, <agent_id>),
scene_prop_get_num_instances = 1810 # (scene_prop_get_num_instances, <destination>, <scene_prop_id>),
scene_prop_get_instance = 1811 # (scene_prop_get_instance, <destination>, <scene_prop_id>, <instance_no>),
scene_prop_get_visibility = 1812 # (scene_prop_get_visibility, <destination>, <scene_prop_id>),
scene_prop_set_visibility = 1813 # (scene_prop_set_visibility, <scene_prop_id>, <value>),
scene_prop_set_hit_points = 1814 # (scene_prop_set_hit_points, <scene_prop_id>, <value>),
scene_prop_get_hit_points = 1815 # (scene_prop_get_hit_points, <destination>, <scene_prop_id>),
scene_prop_get_max_hit_points = 1816 # (scene_prop_get_max_hit_points, <destination>, <scene_prop_id>),
scene_prop_get_team = 1817 # (scene_prop_get_team, <destination>, <scene_prop_id>),
scene_prop_set_team = 1818 # (scene_prop_set_team, <scene_prop_id>, <value>),
scene_prop_set_prune_time = 1819 # (scene_prop_set_prune_time, <scene_prop_id>, <value>),
# prune time can only be set to objects that are already on the prune queue. static objects are not affected by this operation.
scene_prop_set_cur_hit_points = 1820 # (scene_prop_set_cur_hit_points, <scene_prop_id>, <value>),
scene_prop_fade_out = 1822 # (scene_prop_fade_out, <scene_prop_id>, <fade_out_time>),
scene_prop_fade_in = 1823 # (scene_prop_fade_in, <scene_prop_id>, <fade_in_time>),
agent_is_in_line_of_sight = 1826 # (agent_is_in_line_of_sight, <agent_id>, <position_no>), rotation of the position register is not used.
agent_deliver_damage_to_agent_advanced = 1827 # (agent_deliver_damage_to_agent_advanced, <destination>, <agent_id_deliverer>, <agent_id>, <value>, [item_id]),
# if value <= 0, then damage will be calculated using the weapon item. item_id is the item that the damage is delivered. can be ignored.
# this advanced mode of agent_deliver_damage_to_agent has 2 differences. 1- the delivered damage is returned. 2- the damage delivery is done after checking the relationship between agents. this might cause no damage, or even damage to the shooter agent because of a friendly fire.
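# Illustrative sketch (assumed usage, not from the original header): deliver
# weapon-based damage and branch on the amount actually dealt, which may be 0
# if the relation check blocked it. ":shooter" and ":target" are hypothetical
# register names holding agent ids.
#   (agent_deliver_damage_to_agent_advanced, ":dealt", ":shooter", ":target", 0),
#   (gt, ":dealt", 0),  # continue only if some damage got through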
team_get_gap_distance = 1828 # (team_get_gap_distance, <destination>, <team_no>, <sub_class>),
add_missile = 1829 # (add_missile, <agent_id>, <starting_position>, <starting_speed_fixed_point>, <weapon_item_id>, <weapon_item_modifier>, <missile_item_id>, <missile_item_modifier>), starting position also contains the direction of the arrow
scene_item_get_num_instances = 1830 # (scene_item_get_num_instances, <destination>, <item_id>),
scene_item_get_instance = 1831 # (scene_item_get_instance, <destination>, <item_id>, <instance_no>),
scene_spawned_item_get_num_instances = 1832 # (scene_spawned_item_get_num_instances, <destination>, <item_id>),
scene_spawned_item_get_instance = 1833 # (scene_spawned_item_get_instance, <destination>, <item_id>, <instance_no>),
scene_allows_mounted_units = 1834 # (scene_allows_mounted_units),
class_set_name = 1837 # (class_set_name, <sub_class>, <string_id>),
prop_instance_is_valid = 1838 # (prop_instance_is_valid, <scene_prop_id>),
prop_instance_get_variation_id = 1840 # (prop_instance_get_variation_id, <destination>, <scene_prop_id>),
prop_instance_get_variation_id_2 = 1841 # (prop_instance_get_variation_id_2, <destination>, <scene_prop_id>),
prop_instance_get_position = 1850 # (prop_instance_get_position, <position_no>, <scene_prop_id>),
prop_instance_get_starting_position = 1851 # (prop_instance_get_starting_position, <position_no>, <scene_prop_id>),
prop_instance_get_scale = 1852 # (prop_instance_get_scale, <position_no>, <scene_prop_id>),
prop_instance_get_scene_prop_kind = 1853 # (prop_instance_get_scene_prop_kind, <destination>, <scene_prop_id>)
prop_instance_set_scale = 1854 # (prop_instance_set_scale, <scene_prop_id>, <value_x_fixed_point>, <value_y_fixed_point>, <value_z_fixed_point>),
prop_instance_set_position = 1855 # (prop_instance_set_position, <scene_prop_id>, <position_no>, [dont_send_to_clients]),
# dont_send_to_clients default is 0 - if you are just doing some physics checks with scene props, then set to 1 and don't send to clients
prop_instance_animate_to_position = 1860 # (prop_instance_animate_to_position, <scene_prop_id>, position, <duration-in-1/100-seconds>),
prop_instance_stop_animating = 1861 # (prop_instance_stop_animating, <scene_prop_id>),
prop_instance_is_animating = 1862 # (prop_instance_is_animating, <destination>, <scene_prop_id>),
prop_instance_get_animation_target_position = 1863 # (prop_instance_get_animation_target_position, <pos>, <scene_prop_id>)
prop_instance_enable_physics = 1864 # (prop_instance_enable_physics, <scene_prop_id>, <value>) 0 for disable, 1 for enable
prop_instance_rotate_to_position = 1865 # (prop_instance_rotate_to_position, <scene_prop_id>, position, <duration-in-1/100-seconds>, <total_rotate_angle>),
prop_instance_initialize_rotation_angles = 1866 # (prop_instance_initialize_rotation_angles, <scene_prop_id>),
prop_instance_refill_hit_points = 1870 # (prop_instance_refill_hit_points, <scene_prop_id>),
prop_instance_dynamics_set_properties = 1871 # (prop_instance_dynamics_set_properties,<scene_prop_id>,mass_friction),
prop_instance_dynamics_set_velocity = 1872 # (prop_instance_dynamics_set_velocity,<scene_prop_id>,linear_velocity),
prop_instance_dynamics_set_omega = 1873 # (prop_instance_dynamics_set_omega,<scene_prop_id>,angular_velocity),
prop_instance_dynamics_apply_impulse = 1874 # (prop_instance_dynamics_apply_impulse,<scene_prop_id>,impulse_force),
prop_instance_receive_damage = 1877 # (prop_instance_receive_damage, <scene_prop_id>, <agent_id>, <damage_value>),
prop_instance_intersects_with_prop_instance = 1880 # (prop_instance_intersects_with_prop_instance, <scene_prop_id>, <scene_prop_id>), give second scene_prop_id as -1 to check all scene props.
# cannot check polygon-to-polygon physics models, but can check any other combinations between sphere, capsule and polygon physics models.
prop_instance_play_sound = 1881 # (prop_instance_play_sound, <scene_prop_id>, <sound_id>, [flags]), sound flags can be given
prop_instance_stop_sound = 1882 # (prop_instance_stop_sound, <scene_prop_id>),
prop_instance_clear_attached_missiles = 1885 # (prop_instance_clear_attached_missiles, <scene_prop_id>), works only with dynamic scene props (non-retrievable missiles)
prop_instance_add_particle_system = 1886 # (prop_instance_add_particle_system, <scene_prop_id>, <par_sys_id>, <position_no>), position is local, not global.
prop_instance_stop_all_particle_systems = 1887 # (prop_instance_stop_all_particle_systems, <scene_prop_id>),
replace_prop_instance = 1889 # (replace_prop_instance, <scene_prop_id>, <new_scene_prop_id>),
replace_scene_props = 1890 # (replace_scene_props, <old_scene_prop_id>,<new_scene_prop_id>),
replace_scene_items_with_scene_props = 1891 # (replace_scene_items_with_scene_props, <old_item_id>,<new_scene_prop_id>),
#-----------------------------------------------------------------------------
# MISSION CONSEQUENCES
#-----------------------------------------------------------------------------
set_mission_result = 1906 # (set_mission_result,<value>),
finish_mission = 1907 # (finish_mission, <delay_in_seconds>),
jump_to_scene = 1910 # (jump_to_scene,<scene_id>,<entry_no>),
set_jump_mission = 1911 # (set_jump_mission,<mission_template_id>),
set_jump_entry = 1912 # (set_jump_entry,<entry_no>),
start_mission_conversation = 1920 # (start_mission_conversation,<troop_id>),
add_reinforcements_to_entry = 1930 # (add_reinforcements_to_entry,<mission_template_entry_no>,<value>),
mission_enable_talk = 1935 # (mission_enable_talk), can talk with troops during battles
mission_disable_talk = 1936 # (mission_disable_talk), disables talk option for the mission
mission_tpl_entry_set_override_flags = 1940 # (mission_tpl_entry_set_override_flags, <mission_template_id>, <entry_no>, <value>),
mission_tpl_entry_clear_override_items = 1941 # (mission_tpl_entry_clear_override_items, <mission_template_id>, <entry_no>),
mission_tpl_entry_add_override_item = 1942 # (mission_tpl_entry_add_override_item, <mission_template_id>, <entry_no>, <item_kind_id>),
set_current_color = 1950 # (set_current_color,<red>,<green>,<blue>), a value of 255 means 100%
set_position_delta = 1955 # (set_position_delta,<x_value>,<y_value>,<z_value>),
add_point_light = 1960 # (add_point_light,[flicker_magnitude],[flicker_interval]), flicker_magnitude between 0 and 100, flicker_interval is in 1/100 seconds
add_point_light_to_entity = 1961 # (add_point_light_to_entity,[flicker_magnitude],[flicker_interval]), flicker_magnitude between 0 and 100, flicker_interval is in 1/100 seconds
particle_system_add_new = 1965 # (particle_system_add_new,<par_sys_id>,[position_no]),
particle_system_emit = 1968 # (particle_system_emit,<par_sys_id>,<value_num_particles>,<value_period>),
particle_system_burst = 1969 # (particle_system_burst,<par_sys_id>,<position_no>,[percentage_burst_strength]),
set_spawn_position = 1970 # (set_spawn_position, <position_no>)
spawn_item = 1971 # (spawn_item, <item_kind_id>, <item_modifier>, [seconds_before_pruning]) if seconds_before_pruning = 0 then item never gets pruned
spawn_agent = 1972 # (spawn_agent,<troop_id>), (stores agent_id in reg0)
spawn_horse = 1973 # (spawn_horse,<item_kind_id>, <item_modifier>) (stores agent_id in reg0)
spawn_scene_prop = 1974 # (spawn_scene_prop, <scene_prop_id>) (stores prop_instance_id in reg0) not yet implemented.
particle_system_burst_no_sync = 1975 # (particle_system_burst_no_sync,<par_sys_id>,<position_no>,[percentage_burst_strength]),
spawn_item_without_refill = 1976 # (spawn_item_without_refill, <item_kind_id>, <item_modifier>, [seconds_before_pruning]), if seconds_before_pruning = 0 then item never gets pruned
agent_get_item_cur_ammo = 1977 # (agent_get_item_cur_ammo, <destination>, <agent_id>, <slot_no>),
cur_tableau_add_tableau_mesh = 1980 # (cur_tableau_add_tableau_mesh, <tableau_material_id>, <value>, <position_register_no>), value is passed to tableau_material
cur_item_set_tableau_material = 1981 # (cur_item_set_tableau_material, <tableau_material_id>, <instance_code>), only call inside ti_on_init_item in module_items
cur_scene_prop_set_tableau_material = 1982 # (cur_scene_prop_set_tableau_material, <tableau_material_id>, <instance_code>), only call inside ti_on_init_scene_prop in module_scene_props
cur_map_icon_set_tableau_material = 1983 # (cur_map_icon_set_tableau_material, <tableau_material_id>, <instance_code>), only call inside ti_on_init_map_icon in module_scene_props
cur_tableau_render_as_alpha_mask = 1984 # (cur_tableau_render_as_alpha_mask)
cur_tableau_set_background_color = 1985 # (cur_tableau_set_background_color, <value>),
cur_agent_set_banner_tableau_material = 1986 # (cur_agent_set_banner_tableau_material, <tableau_material_id>)
cur_tableau_set_ambient_light = 1987 # (cur_tableau_set_ambient_light, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
cur_tableau_set_camera_position = 1988 # (cur_tableau_set_camera_position, <position_no>),
cur_tableau_set_camera_parameters = 1989 # (cur_tableau_set_camera_parameters,<is_perspective>,<camera_width*1000>, <camera_height*1000>, <camera_near*1000>, <camera_far*1000>),
cur_tableau_add_point_light = 1990 # (cur_tableau_add_point_light, <map_icon_id>, <position_no>, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
cur_tableau_add_sun_light = 1991 # (cur_tableau_add_sun_light, <map_icon_id>, <position_no>, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
cur_tableau_add_mesh = 1992 # (cur_tableau_add_mesh, <mesh_id>, <position_no>, <value_fixed_point>, <value_fixed_point>),
# first value fixed point is the scale factor, second value fixed point is alpha. use 0 for default values
cur_tableau_add_mesh_with_vertex_color = 1993 # (cur_tableau_add_mesh_with_vertex_color, <mesh_id>, <position_no>, <value_fixed_point>, <value_fixed_point>, <value>),
# first value fixed point is the scale factor, second value fixed point is alpha. value is vertex color. use 0 for default values. vertex_color has no default value.
cur_tableau_add_map_icon = 1994 # (cur_tableau_add_map_icon, <map_icon_id>, <position_no>, <value_fixed_point>), value fixed point is the scale factor
cur_tableau_add_troop = 1995 # (cur_tableau_add_troop, <troop_id>, <position_no>, <animation_id>, <instance_no>),
# if instance_no value is 0 or less, then the face is not generated randomly (important for heroes)
cur_tableau_add_horse = 1996 # (cur_tableau_add_horse, <item_id>, <position_no>, <animation_id>),
cur_tableau_set_override_flags = 1997 # (cur_tableau_set_override_flags, <value>),
cur_tableau_clear_override_items = 1998 # (cur_tableau_clear_override_items),
cur_tableau_add_override_item = 1999 # (cur_tableau_add_override_item, <item_kind_id>),
cur_tableau_add_mesh_with_scale_and_vertex_color = 2000 # (cur_tableau_add_mesh_with_scale_and_vertex_color, <mesh_id>, <position_no>, <position_no>, <value_fixed_point>, <value>),
# second position_no is x,y,z scale factors (with fixed point values). value fixed point is alpha. value is vertex color. use 0 for default values. scale and vertex_color has no default values.
mission_cam_set_mode = 2001 # (mission_cam_set_mode, <mission_cam_mode>, <duration-in-1/1000-seconds>, <value>)
# when leaving manual mode, duration defines the animation time from the initial position to the new position.
# set as 0 for instant camera position update. if value = 0, then camera velocity will be linear, else it will be non-linear
mission_get_time_speed = 2002 # (mission_get_time_speed, <destination_fixed_point>),
mission_set_time_speed = 2003 # (mission_set_time_speed, <value_fixed_point>) this works only when cheat mode is enabled
mission_time_speed_move_to_value = 2004 # (mission_time_speed_move_to_value, <value_fixed_point>, <duration-in-1/1000-seconds>) this works only when cheat mode is enabled
mission_set_duel_mode = 2006 # (mission_set_duel_mode, <value>), value: 0 = off, 1 = on
mission_cam_set_screen_color = 2008 # (mission_cam_set_screen_color, <value>), value is color together with alpha
mission_cam_animate_to_screen_color = 2009 # (mission_cam_animate_to_screen_color, <value>, <duration-in-1/1000-seconds>), value is color together with alpha
mission_cam_get_position = 2010 # (mission_cam_get_position, <position_register_no>)
mission_cam_set_position = 2011 # (mission_cam_set_position, <position_register_no>)
mission_cam_animate_to_position = 2012 # (mission_cam_animate_to_position, <position_register_no>, <duration-in-1/1000-seconds>, <value>)
# if value = 0, then camera velocity will be linear. else it will be non-linear
mission_cam_get_aperture = 2013 # (mission_cam_get_aperture, <destination>)
mission_cam_set_aperture = 2014 # (mission_cam_set_aperture, <value>)
mission_cam_animate_to_aperture = 2015 # (mission_cam_animate_to_aperture, <value>, <duration-in-1/1000-seconds>, <value>)
# if value = 0, then camera velocity will be linear. else it will be non-linear
mission_cam_animate_to_position_and_aperture = 2016 # (mission_cam_animate_to_position_and_aperture, <position_register_no>, <value>, <duration-in-1/1000-seconds>, <value>)
# if value = 0, then camera velocity will be linear. else it will be non-linear
mission_cam_set_target_agent = 2017 # (mission_cam_set_target_agent, <agent_id>, <value>) if value = 0 then do not use agent's rotation, else use agent's rotation
mission_cam_clear_target_agent = 2018 # (mission_cam_clear_target_agent)
mission_cam_set_animation = 2019 # (mission_cam_set_animation, <anim_id>),
talk_info_show = 2020 # (talk_info_show, <hide_or_show>), 0 = hide, 1 = show
talk_info_set_relation_bar = 2021 # (talk_info_set_relation_bar, <value>) set relation bar to a value between -100 and 100, enter an invalid value to hide the bar.
talk_info_set_line = 2022 # (talk_info_set_line, <line_no>, <string_no>)
set_background_mesh = 2031 # (set_background_mesh, <mesh_id>),
set_game_menu_tableau_mesh = 2032 # (set_game_menu_tableau_mesh, <tableau_material_id>, <value>, <position_register_no>), value is passed to tableau_material
# position contains the following information: x = x position of the mesh, y = y position of the mesh, z = scale of the mesh
change_screen_return = 2040 # (change_screen_return),
change_screen_loot = 2041 # (change_screen_loot, <troop_id>),
change_screen_trade = 2042 # (change_screen_trade, <troop_id>),
change_screen_exchange_members = 2043 # (change_screen_exchange_members, [0,1 = exchange_leader], [party_id]), if party id is not given, current party will be used
change_screen_trade_prisoners = 2044 # (change_screen_trade_prisoners),
change_screen_buy_mercenaries = 2045 # (change_screen_buy_mercenaries),
change_screen_view_character = 2046 # (change_screen_view_character),
change_screen_training = 2047 # (change_screen_training),
change_screen_mission = 2048 # (change_screen_mission),
change_screen_map_conversation = 2049 # (change_screen_map_conversation, <troop_id>),
change_screen_exchange_with_party = 2050 # (change_screen_exchange_with_party, <party_id>),
change_screen_equip_other = 2051 # (change_screen_equip_other, <troop_id>),
change_screen_map = 2052 # (change_screen_map),
change_screen_notes = 2053 # (change_screen_notes, <note_type>, <object_id>), Note type can be 1 = troops, 2 = factions, 3 = parties, 4 = quests, 5 = info_pages
change_screen_quit = 2055 # (change_screen_quit),
change_screen_give_members = 2056 # (change_screen_give_members, [party_id]), if party id is not given, current party will be used
change_screen_controls = 2057 # (change_screen_controls),
change_screen_options = 2058 # (change_screen_options),
jump_to_menu = 2060 # (jump_to_menu,<menu_id>),
disable_menu_option = 2061 # (disable_menu_option),
store_trigger_param = 2070 # (store_trigger_param, <destination>, <trigger_param_no>),
store_trigger_param_1 = 2071 # (store_trigger_param_1,<destination>),
store_trigger_param_2 = 2072 # (store_trigger_param_2,<destination>),
store_trigger_param_3 = 2073 # (store_trigger_param_3,<destination>),
set_trigger_result = 2075 # (set_trigger_result, <value>),
agent_ai_get_look_target = 2080 # (agent_ai_get_look_target, <destination>, <agent_id>),
agent_ai_get_move_target = 2081 # (agent_ai_get_move_target, <destination>, <agent_id>),
agent_ai_get_behavior_target = 2082 # (agent_ai_get_behavior_target, <destination>, <agent_id>),
agent_ai_set_can_crouch = 2083 # (agent_ai_set_can_crouch, <agent_id>, <value>), 0 for false, 1 for true.
agent_set_max_hit_points = 2090 # (agent_set_max_hit_points,<agent_id>,<value>,[absolute]), set absolute to 1 if value is absolute, otherwise value will be treated as relative number in range [0..100]
agent_set_damage_modifier = 2091 # (agent_set_damage_modifier, <agent_id>, <value>), value is in percentage, 100 is default
agent_set_accuracy_modifier = 2092 # (agent_set_accuracy_modifier, <agent_id>, <value>), value is in percentage, 100 is default, value can be between [0..1000]
agent_set_speed_modifier = 2093 # (agent_set_speed_modifier, <agent_id>, <value>), value is in percentage, 100 is default, value can be between [0..1000]
agent_set_reload_speed_modifier = 2094 # (agent_set_reload_speed_modifier, <agent_id>, <value>), value is in percentage, 100 is default, value can be between [0..1000]
agent_set_use_speed_modifier = 2095 # (agent_set_use_speed_modifier, <agent_id>, <value>), value is in percentage, 100 is default, value can be between [0..1000]
agent_set_visibility = 2096 # (agent_set_visibility, <agent_id>, <value>), 0 for invisible, 1 for visible.
agent_get_crouch_mode = 2097 # (agent_get_crouch_mode, <destination>, <agent_id>),
agent_set_crouch_mode = 2098 # (agent_set_crouch_mode, <agent_id>, <value>), 0 for false, 1 for true.
agent_set_ranged_damage_modifier = 2099 # (agent_set_ranged_damage_modifier, <agent_id>, <value>), value is in percentage, 100 is default
val_lshift = 2100 # (val_lshift, <destination>, <value>), shifts the bits of destination to left by value amount.
val_rshift = 2101 # (val_rshift, <destination>, <value>), shifts the bits of destination to right by value amount.
val_add = 2105 # (val_add,<destination>,<value>), destination = destination + value
val_sub = 2106 # (val_sub,<destination>,<value>), destination = destination - value
val_mul = 2107 # (val_mul,<destination>,<value>), destination = destination * value
val_div = 2108 # (val_div,<destination>,<value>), destination = destination / value
val_mod = 2109 # (val_mod,<destination>,<value>), destination = destination % value
val_min = 2110 # (val_min,<destination>,<value>), destination = min(destination, value)
val_max = 2111 # (val_max,<destination>,<value>), destination = max(destination, value)
val_clamp = 2112 # (val_clamp,<destination>,<lower_bound>, <upper_bound>), destination = max(min(destination,<upper_bound> - 1),<lower_bound>)
val_abs = 2113 # (val_abs,<destination>), destination = abs(destination)
val_or = 2114 # (val_or,<destination>,<value>), destination = destination | value
val_and = 2115 # (val_and,<destination>,<value>), destination = destination & value
store_or = 2116 # (store_or,<destination>,<value_1>,<value_2>), destination = value_1 | value_2
store_and = 2117 # (store_and,<destination>,<value_1>,<value_2>), destination = value_1 & value_2
store_mod = 2119 # (store_mod,<destination>,<value_1>,<value_2>), destination = value_1 % value_2
store_add = 2120 # (store_add,<destination>,<value_1>,<value_2>), destination = value_1 + value_2
store_sub = 2121 # (store_sub,<destination>,<value_1>,<value_2>), destination = value_1 - value_2
store_mul = 2122 # (store_mul,<destination>,<value_1>,<value_2>), destination = value_1 * value_2
store_div = 2123 # (store_div,<destination>,<value_1>,<value_2>), destination = value_1 / value_2
set_fixed_point_multiplier = 2124 # (set_fixed_point_multiplier, <value>), sets precision of values named as value_fixed_point or destination_fixed_point, default is 100
store_sqrt = 2125 # (store_sqrt, <destination_fixed_point>, <value_fixed_point>), takes square root of the value
store_pow = 2126 # (store_pow, <destination_fixed_point>, <value_fixed_point>, <value_fixed_point>), raises the first value to the power of the second
store_sin = 2127 # (store_sin, <destination_fixed_point>, <value_fixed_point>), takes sine of the value that is in degrees
store_cos = 2128 # (store_cos, <destination_fixed_point>, <value_fixed_point>), takes cosine of the value that is in degrees
store_tan = 2129 # (store_tan, <destination_fixed_point>, <value_fixed_point>), takes tangent of the value that is in degrees
convert_to_fixed_point = 2130 # (convert_to_fixed_point, <destination_fixed_point>), multiplies the value by the fixed point multiplier
convert_from_fixed_point = 2131 # (convert_from_fixed_point, <destination>), divides the value by the fixed point multiplier
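# Worked example (comments only; values assume the operations behave as
# documented above): after (set_fixed_point_multiplier, 1000), the raw value
# 2000 represents 2.0, so
#   (store_sqrt, ":root", 2000),            # ":root" becomes ~1414, i.e. ~1.414
#   (convert_from_fixed_point, ":root"),    # ":root" becomes 1 (integer part)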
assign = 2133 # (assign,<destination>,<value>), had to put this here so that it can be called from conditions.
shuffle_range = 2134 # (shuffle_range,<reg_no>,<reg_no>),
store_random = 2135 # (store_random,<destination>,<range_high>), deprecated: gets random number in range [0, range_high - 1]
store_random_in_range = 2136 # (store_random_in_range,<destination>,<range_low>,<range_high>), gets random number in range [range_low, range_high - 1]
store_asin = 2140 # (store_asin, <destination_fixed_point>, <value_fixed_point>),
store_acos = 2141 # (store_acos, <destination_fixed_point>, <value_fixed_point>),
store_atan = 2142 # (store_atan, <destination_fixed_point>, <value_fixed_point>),
store_atan2 = 2143 # (store_atan2, <destination_fixed_point>, <value_fixed_point>, <value_fixed_point>), first value is y, second is x
store_troop_gold = 2149 # (store_troop_gold,<destination>,<troop_id>),
store_num_free_stacks = 2154 # (store_num_free_stacks,<destination>,<party_id>),
store_num_free_prisoner_stacks = 2155 # (store_num_free_prisoner_stacks,<destination>,<party_id>),
store_party_size = 2156 # (store_party_size,<destination>,[party_id]),
store_party_size_wo_prisoners = 2157 # (store_party_size_wo_prisoners,<destination>,[party_id]),
store_troop_kind_count = 2158 # (store_troop_kind_count,<destination>,<troop_id>), deprecated: use party_count_members_of_type instead
store_num_regular_prisoners = 2159 # (store_num_regular_prisoners,<destination>,<party_id>),
store_troop_count_companions = 2160 # (store_troop_count_companions,<destination>,<troop_id>,[party_id]),
store_troop_count_prisoners = 2161 # (store_troop_count_prisoners,<destination>,<troop_id>,[party_id]),
store_item_kind_count = 2165 # (store_item_kind_count,<destination>,<item_id>,[troop_id]),
store_free_inventory_capacity = 2167 # (store_free_inventory_capacity,<destination>,[troop_id]),
store_skill_level = 2170 # (store_skill_level,<destination>,<skill_id>,[troop_id]),
store_character_level = 2171 # (store_character_level,<destination>,[troop_id]),
store_attribute_level = 2172 # (store_attribute_level,<destination>,<troop_id>,<attribute_id>),
store_troop_faction = 2173 # (store_troop_faction,<destination>,<troop_id>),
store_faction_of_troop = 2173 # (store_faction_of_troop,<destination>,<troop_id>), alias of store_troop_faction
store_troop_health = 2175 # (store_troop_health,<destination>,<troop_id>,[absolute]),
# set absolute to 1 to get actual health; otherwise this will return percentage health in range (0-100)
store_proficiency_level = 2176 # (store_proficiency_level,<destination>,<troop_id>,<attribute_id>),
store_relation = 2190 # (store_relation,<destination>,<faction_id_1>,<faction_id_2>),
set_conversation_speaker_troop = 2197 # (set_conversation_speaker_troop, <troop_id>),
set_conversation_speaker_agent = 2198 # (set_conversation_speaker_agent, <agent_id>),
store_conversation_agent = 2199 # (store_conversation_agent,<destination>),
store_conversation_troop = 2200 # (store_conversation_troop,<destination>),
store_partner_faction = 2201 # (store_partner_faction,<destination>),
store_encountered_party = 2202 # (store_encountered_party,<destination>),
store_encountered_party2 = 2203 # (store_encountered_party2,<destination>),
store_faction_of_party = 2204 # (store_faction_of_party, <destination>, <party_id>),
set_encountered_party = 2205 # (set_encountered_party,<destination>),
store_current_scene = 2211 # (store_current_scene,<destination>),
store_zoom_amount = 2220 # (store_zoom_amount, <destination_fixed_point>),
set_zoom_amount = 2221 # (set_zoom_amount, <value_fixed_point>),
is_zoom_disabled = 2222 # (is_zoom_disabled),
store_item_value = 2230 # (store_item_value,<destination>,<item_id>),
store_troop_value = 2231 # (store_troop_value,<destination>,<troop_id>),
store_partner_quest = 2240 # (store_partner_quest,<destination>),
store_random_quest_in_range = 2250 # (store_random_quest_in_range,<destination>,<lower_bound>,<upper_bound>),
store_random_troop_to_raise = 2251 # (store_random_troop_to_raise,<destination>,<lower_bound>,<upper_bound>),
store_random_troop_to_capture = 2252 # (store_random_troop_to_capture,<destination>,<lower_bound>,<upper_bound>),
store_random_party_in_range = 2254 # (store_random_party_in_range,<destination>,<lower_bound>,<upper_bound>),
store01_random_parties_in_range = 2255 # (store01_random_parties_in_range,<lower_bound>,<upper_bound>), stores two random, different parties in a range to reg0 and reg1.
store_random_horse = 2257 # (store_random_horse,<destination>)
store_random_equipment = 2258 # (store_random_equipment,<destination>)
store_random_armor = 2259 # (store_random_armor,<destination>)
store_quest_number = 2261 # (store_quest_number,<destination>,<quest_id>),
store_quest_item = 2262 # (store_quest_item,<destination>,<item_id>),
store_quest_troop = 2263 # (store_quest_troop,<destination>,<troop_id>),
store_current_hours = 2270 # (store_current_hours,<destination>),
store_time_of_day = 2271 # (store_time_of_day,<destination>),
store_current_day = 2272 # (store_current_day,<destination>),
is_currently_night = 2273 # (is_currently_night),
store_distance_to_party_from_party = 2281 # (store_distance_to_party_from_party,<destination>,<party_id>,<party_id>),
get_party_ai_behavior = 2290 # (get_party_ai_behavior,<destination>,<party_id>),
get_party_ai_object = 2291 # (get_party_ai_object,<destination>,<party_id>),
party_get_ai_target_position = 2292 # (party_get_ai_target_position,<position_no>,<party_id>),
get_party_ai_current_behavior = 2293 # (get_party_ai_current_behavior,<destination>,<party_id>),
get_party_ai_current_object = 2294 # (get_party_ai_current_object,<destination>,<party_id>),
store_num_parties_created = 2300 # (store_num_parties_created,<destination>,<party_template_id>),
store_num_parties_destroyed = 2301 # (store_num_parties_destroyed,<destination>,<party_template_id>),
store_num_parties_destroyed_by_player = 2302 # (store_num_parties_destroyed_by_player,<destination>,<party_template_id>),
store_num_parties_of_template = 2310 # (store_num_parties_of_template,<destination>,<party_template_id>),
store_random_party_of_template = 2311 # (store_random_party_of_template,<destination>,<party_template_id>), fails if no party exists with template_id (expensive)
str_is_empty = 2318 # (str_is_empty, <string_register>),
str_clear = 2319 # (str_clear, <string_register>)
str_store_string = 2320 # (str_store_string,<string_register>,<string_id>),
str_store_string_reg = 2321 # (str_store_string_reg,<string_register>,<string_no>), copies one string register to another.
str_store_troop_name = 2322 # (str_store_troop_name,<string_register>,<troop_id>),
str_store_troop_name_plural = 2323 # (str_store_troop_name_plural,<string_register>,<troop_id>),
str_store_troop_name_by_count = 2324 # (str_store_troop_name_by_count,<string_register>,<troop_id>,<number>),
str_store_item_name = 2325 # (str_store_item_name,<string_register>,<item_id>),
str_store_item_name_plural = 2326 # (str_store_item_name_plural,<string_register>,<item_id>),
str_store_item_name_by_count = 2327 # (str_store_item_name_by_count,<string_register>,<item_id>),
str_store_party_name = 2330 # (str_store_party_name,<string_register>,<party_id>),
str_store_agent_name = 2332 # (str_store_agent_name,<string_register>,<agent_id>),
str_store_faction_name = 2335 # (str_store_faction_name,<string_register>,<faction_id>),
str_store_quest_name = 2336 # (str_store_quest_name,<string_register>,<quest_id>),
str_store_info_page_name = 2337 # (str_store_info_page_name,<string_register>,<info_page_id>),
str_store_date = 2340 # (str_store_date,<string_register>,<number_of_hours_to_add_to_the_current_date>),
str_store_troop_name_link = 2341 # (str_store_troop_name_link,<string_register>,<troop_id>),
str_store_party_name_link = 2342 # (str_store_party_name_link,<string_register>,<party_id>),
str_store_faction_name_link = 2343 # (str_store_faction_name_link,<string_register>,<faction_id>),
str_store_quest_name_link = 2344 # (str_store_quest_name_link,<string_register>,<quest_id>),
str_store_info_page_name_link = 2345 # (str_store_info_page_name_link,<string_register>,<info_page_id>),
str_store_class_name = 2346 # (str_store_class_name,<string_register>,<class_id>)
str_store_player_username = 2350 # (str_store_player_username,<string_register>,<player_id>), used in multiplayer mode only
str_store_server_password = 2351 # (str_store_server_password, <string_register>),
str_store_server_name = 2352 # (str_store_server_name, <string_register>),
str_store_welcome_message = 2353 # (str_store_welcome_message, <string_register>),
str_encode_url = 2355 # (str_encode_url, <string_register>),
store_remaining_team_no = 2360 # (store_remaining_team_no,<destination>),
store_mission_timer_a_msec = 2365 # (store_mission_timer_a_msec,<destination>),
store_mission_timer_b_msec = 2366 # (store_mission_timer_b_msec,<destination>),
store_mission_timer_c_msec = 2367 # (store_mission_timer_c_msec,<destination>),
store_mission_timer_a = 2370 # (store_mission_timer_a,<destination>),
store_mission_timer_b = 2371 # (store_mission_timer_b,<destination>),
store_mission_timer_c = 2372 # (store_mission_timer_c,<destination>),
reset_mission_timer_a = 2375 # (reset_mission_timer_a),
reset_mission_timer_b = 2376 # (reset_mission_timer_b),
reset_mission_timer_c = 2377 # (reset_mission_timer_c),
set_cheer_at_no_enemy = 2379 # (set_cheer_at_no_enemy, <value>), values: 0 = do not cheer (do as commander says), 1 = cheer
store_enemy_count = 2380 # (store_enemy_count,<destination>),
store_friend_count = 2381 # (store_friend_count,<destination>),
store_ally_count = 2382 # (store_ally_count,<destination>),
store_defender_count = 2383 # (store_defender_count,<destination>),
store_attacker_count = 2384 # (store_attacker_count,<destination>),
store_normalized_team_count = 2385 # (store_normalized_team_count,<destination>, <team_no>),
# counts the number of agents belonging to a team and normalizes the result regarding battle_size and advantage.
set_postfx = 2386 # (set_postfx,<value>),
set_river_shader_to_mud = 2387 # (set_river_shader_to_mud), changes river material for muddy env
show_troop_details = 2388 # (show_troop_details, <troop_id>, <position>, <troop_price>),
set_skybox = 2389 # (set_skybox, <non_hdr_skybox_index>, <hdr_skybox_index>), forces selected skybox for a scene, use -1 to disable
set_startup_sun_light = 2390 # (set_startup_sun_light, <r>, <g>, <b>), changes the sun light color
set_startup_ambient_light = 2391 # (set_startup_ambient_light, <r>, <g>, <b>), changes the ambient light color
set_startup_ground_ambient_light = 2392 # (set_startup_ground_ambient_light, <r>, <g>, <b>), changes the ground ambient light color
rebuild_shadow_map = 2393 # (rebuild_shadow_map),
set_shader_param_int = 2400 # (set_shader_param_int, <parameter_name>, <value>), sets the int shader parameter <parameter_name> to <value>
set_shader_param_float = 2401 # (set_shader_param_float, <parameter_name>, <value>), sets the float shader parameter <parameter_name> to <value>
set_shader_param_float4 = 2402 # (set_shader_param_float4, <parameter_name>, <value_x>, <value_y>, <value_z>, <value_w>),
# sets the float4 shader parameter <parameter_name> to <value_x/y/z/w>
set_shader_param_float4x4 = 2403 # (set_shader_param_float4x4, <parameter_name>, [0][0], [0][1], [0][2], [1][0], [1][1], [1][2], [2][0], [2][1], [2][2], [3][0], [3][1], [3][2]),
# sets the float4x4 shader parameter <parameter_name> to the given values; the w components default to (0, 0, 0, 1).
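# Reverse lookup table mapping every integer opcode defined above back to its
# symbolic name, built by scanning this module's globals.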
opcode_names = dict((opcode, name) for name, opcode in globals().iteritems() if isinstance(opcode, int))
def get_opcode_name(opcode):
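    """Return the symbolic name for a numeric opcode, re-attaching any
    this_or_next/neg modifier flags as a "flag|" prefix."""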
prefix = None
try:
if opcode & this_or_next:
prefix = "this_or_next|"
opcode ^= this_or_next
if opcode & neg:
prefix = prefix + "neg|" if prefix else "neg|"
opcode ^= neg
opname = opcode_names[opcode]
except (KeyError, TypeError):
opname = repr(opcode)
return prefix + opname if prefix else opname
def print_operations_block(block):
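    """Pretty-print a block of operations as module-system source text,
    indenting the body of every try_begin/try_for_*/else_try block."""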
indent = 0
for operation in block:
if isinstance(operation, (tuple, list)):
opcode = operation[0]
operation_list = [get_opcode_name(opcode)] + [repr(entry) for entry in operation[1:]]
else:
opcode = operation
operation_list = [get_opcode_name(opcode)]
if opcode in (else_try, try_end) and indent > 0:
indent -= 1
print "{0}({1}),".format(" " * indent, ", ".join(operation_list))
if opcode in try_begin_operations or opcode == else_try:
indent += 1
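# Operations whose first argument is a destination register that the engine
# writes to (an "lhs" value) rather than reads.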
lhs_operations = frozenset([
try_for_range,
try_for_range_backwards,
try_for_parties,
try_for_agents,
store_script_param_1,
store_script_param_2,
store_script_param,
store_repeat_object,
get_global_cloud_amount,
get_global_haze_amount,
options_get_damage_to_player,
options_get_damage_to_friends,
options_get_combat_ai,
options_get_campaign_ai,
options_get_combat_speed,
profile_get_banner_id,
get_achievement_stat,
get_max_players,
player_get_team_no,
player_get_troop_id,
player_get_agent_id,
player_get_gold,
multiplayer_get_my_team,
multiplayer_get_my_troop,
multiplayer_get_my_gold,
multiplayer_get_my_player,
player_get_score,
player_get_kill_count,
player_get_death_count,
player_get_ping,
player_get_is_muted,
player_get_unique_id,
player_get_gender,
player_get_item_id,
player_get_banner_id,
game_get_reduce_campaign_ai,
multiplayer_find_spawn_point,
team_get_bot_kill_count,
team_get_bot_death_count,
team_get_kill_count,
team_get_score,
team_get_faction,
player_get_value_of_original_items,
server_get_renaming_server_allowed,
server_get_changing_game_type_allowed,
server_get_friendly_fire,
server_get_control_block_dir,
server_get_combat_speed,
server_get_add_to_game_servers_list,
server_get_ghost_mode,
server_get_max_num_players,
server_get_melee_friendly_fire,
server_get_friendly_fire_damage_self_ratio,
server_get_friendly_fire_damage_friend_ratio,
server_get_anti_cheat,
troop_get_slot,
party_get_slot,
faction_get_slot,
scene_get_slot,
party_template_get_slot,
agent_get_slot,
quest_get_slot,
item_get_slot,
player_get_slot,
team_get_slot,
scene_prop_get_slot,
store_last_sound_channel,
get_angle_between_positions,
get_distance_between_positions,
get_distance_between_positions_in_meters,
get_sq_distance_between_positions,
get_sq_distance_between_positions_in_meters,
get_sq_distance_between_position_heights,
position_get_x,
position_get_y,
position_get_z,
position_get_scale_x,
position_get_scale_y,
position_get_scale_z,
position_get_rotation_around_z,
position_normalize_origin,
position_get_rotation_around_x,
position_get_rotation_around_y,
position_get_distance_to_terrain,
position_get_distance_to_ground_level,
create_text_overlay,
create_mesh_overlay,
create_button_overlay,
create_image_button_overlay,
create_slider_overlay,
create_progress_overlay,
create_combo_button_overlay,
create_text_box_overlay,
create_check_box_overlay,
create_simple_text_box_overlay,
create_image_button_overlay_with_tableau_material,
create_mesh_overlay_with_tableau_material,
create_game_button_overlay,
create_in_game_button_overlay,
create_number_box_overlay,
create_listbox_overlay,
create_mesh_overlay_with_item_id,
overlay_get_position,
create_combo_label_overlay,
get_average_game_difficulty,
get_level_boundary,
faction_get_color,
troop_get_type,
troop_get_xp,
troop_get_class,
troop_inventory_slot_get_item_amount,
troop_inventory_slot_get_item_max_amount,
troop_get_inventory_capacity,
troop_get_inventory_slot,
troop_get_inventory_slot_modifier,
troop_get_upgrade_troop,
item_get_type,
party_get_num_companions,
party_get_num_prisoners,
party_get_current_terrain,
party_get_template_id,
party_count_members_of_type,
party_count_companions_of_type,
party_count_prisoners_of_type,
party_get_free_companions_capacity,
party_get_free_prisoners_capacity,
party_get_helpfulness,
party_get_ai_initiative,
party_get_num_companion_stacks,
party_get_num_prisoner_stacks,
party_stack_get_troop_id,
party_stack_get_size,
party_stack_get_num_wounded,
party_stack_get_troop_dna,
party_prisoner_stack_get_troop_id,
party_prisoner_stack_get_size,
party_prisoner_stack_get_troop_dna,
party_get_cur_town,
party_get_morale,
party_get_battle_opponent,
party_get_icon,
party_get_skill_level,
get_battle_advantage,
party_get_attached_to,
party_get_num_attached_parties,
party_get_attached_party_with_rank,
get_player_agent_no,
get_player_agent_kill_count,
get_player_agent_own_troop_kill_count,
agent_get_horse,
agent_get_rider,
agent_get_party_id,
agent_get_entry_no,
agent_get_troop_id,
agent_get_item_id,
store_agent_hit_points,
agent_get_kill_count,
agent_get_player_id,
agent_get_wielded_item,
agent_get_ammo,
agent_get_simple_behavior,
agent_get_combat_state,
agent_get_attached_scene_prop,
agent_get_time_elapsed_since_removed,
agent_get_number_of_enemies_following,
agent_get_attack_action,
agent_get_defend_action,
agent_get_group,
agent_get_action_dir,
agent_get_animation,
agent_get_team,
agent_get_class,
agent_get_division,
team_get_hold_fire_order,
team_get_movement_order,
team_get_riding_order,
team_get_weapon_usage_order,
team_get_leader,
agent_get_item_slot,
scene_prop_get_num_instances,
scene_prop_get_instance,
scene_prop_get_visibility,
scene_prop_get_hit_points,
scene_prop_get_max_hit_points,
scene_prop_get_team,
agent_get_ammo_for_slot,
agent_deliver_damage_to_agent_advanced,
team_get_gap_distance,
scene_item_get_num_instances,
scene_item_get_instance,
scene_spawned_item_get_num_instances,
scene_spawned_item_get_instance,
prop_instance_get_variation_id,
prop_instance_get_variation_id_2,
prop_instance_get_position,
prop_instance_get_starting_position,
prop_instance_get_scale,
prop_instance_get_scene_prop_kind,
prop_instance_is_animating,
prop_instance_get_animation_target_position,
agent_get_item_cur_ammo,
mission_get_time_speed,
mission_cam_get_aperture,
store_trigger_param,
store_trigger_param_1,
store_trigger_param_2,
store_trigger_param_3,
agent_ai_get_look_target,
agent_ai_get_move_target,
agent_ai_get_behavior_target,
agent_get_crouch_mode,
store_or,
store_and,
store_mod,
store_add,
store_sub,
store_mul,
store_div,
store_sqrt,
store_pow,
store_sin,
store_cos,
store_tan,
assign,
store_random,
store_random_in_range,
store_asin,
store_acos,
store_atan,
store_atan2,
store_troop_gold,
store_num_free_stacks,
store_num_free_prisoner_stacks,
store_party_size,
store_party_size_wo_prisoners,
store_troop_kind_count,
store_num_regular_prisoners,
store_troop_count_companions,
store_troop_count_prisoners,
store_item_kind_count,
store_free_inventory_capacity,
store_skill_level,
store_character_level,
store_attribute_level,
store_troop_faction,
store_troop_health,
store_proficiency_level,
store_relation,
store_conversation_agent,
store_conversation_troop,
store_partner_faction,
store_encountered_party,
store_encountered_party2,
store_faction_of_party,
store_current_scene,
store_zoom_amount,
store_item_value,
store_troop_value,
store_partner_quest,
store_random_quest_in_range,
store_random_troop_to_raise,
store_random_troop_to_capture,
store_random_party_in_range,
store_random_horse,
store_random_equipment,
store_random_armor,
store_quest_number,
store_quest_item,
store_quest_troop,
store_current_hours,
store_time_of_day,
store_current_day,
store_distance_to_party_from_party,
get_party_ai_behavior,
get_party_ai_object,
get_party_ai_current_behavior,
get_party_ai_current_object,
store_num_parties_created,
store_num_parties_destroyed,
store_num_parties_destroyed_by_player,
store_num_parties_of_template,
store_random_party_of_template,
store_remaining_team_no,
store_mission_timer_a_msec,
store_mission_timer_b_msec,
store_mission_timer_c_msec,
store_mission_timer_a,
store_mission_timer_b,
store_mission_timer_c,
store_enemy_count,
store_friend_count,
store_ally_count,
store_defender_count,
store_attacker_count,
store_normalized_team_count,
])
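# Superset of lhs_operations that also covers the in-place val_* arithmetic
# operations, which both read and write their first argument.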
global_lhs_operations = frozenset([
val_lshift,
val_rshift,
val_add,
val_sub,
val_mul,
val_div,
val_max,
val_min,
val_mod,
] + list(lhs_operations))
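# Conditional operations: each of these can fail at runtime, ending the
# enclosing try block instead of raising an error.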
can_fail_operations = frozenset([
ge,
eq,
gt,
is_between,
entering_town,
map_free,
encountered_party_is_attacker,
conversation_screen_is_active,
troop_is_hero,
troop_is_wounded,
key_is_down,
key_clicked,
game_key_is_down,
game_key_clicked,
hero_can_join,
hero_can_join_as_prisoner,
party_can_join,
party_can_join_as_prisoner,
troops_can_join,
troops_can_join_as_prisoner,
party_can_join_party,
main_party_has_troop,
party_is_in_town,
party_is_in_any_town,
party_is_active,
player_has_item,
troop_has_item_equipped,
troop_is_mounted,
troop_is_guarantee_ranged,
troop_is_guarantee_horse,
player_is_active,
multiplayer_is_server,
multiplayer_is_dedicated_server,
game_in_multiplayer_mode,
player_is_admin,
player_is_busy_with_menus,
player_item_slot_is_picked_up,
check_quest_active,
check_quest_finished,
check_quest_succeeded,
check_quest_failed,
check_quest_concluded,
is_trial_version,
is_edit_mode_enabled,
troop_slot_eq,
party_slot_eq,
faction_slot_eq,
scene_slot_eq,
party_template_slot_eq,
agent_slot_eq,
quest_slot_eq,
item_slot_eq,
player_slot_eq,
team_slot_eq,
scene_prop_slot_eq,
troop_slot_ge,
party_slot_ge,
faction_slot_ge,
scene_slot_ge,
party_template_slot_ge,
agent_slot_ge,
quest_slot_ge,
item_slot_ge,
player_slot_ge,
team_slot_ge,
scene_prop_slot_ge,
position_has_line_of_sight_to_position,
position_is_behind_position,
is_presentation_active,
all_enemies_defeated,
race_completed_by_player,
num_active_teams_le,
main_hero_fallen,
lt,
neq,
le,
teams_are_enemies,
agent_is_alive,
agent_is_wounded,
agent_is_human,
agent_is_ally,
agent_is_non_player,
agent_is_defender,
agent_is_active,
agent_is_routed,
agent_is_in_special_mode,
agent_is_in_parried_animation,
class_is_listening_order,
agent_check_offer_from_agent,
entry_point_is_auto_generated,
scene_prop_has_agent_on_it,
agent_is_alarmed,
agent_is_in_line_of_sight,
scene_prop_get_instance,
scene_item_get_instance,
scene_spawned_item_get_instance,
scene_allows_mounted_units,
prop_instance_is_valid,
prop_instance_intersects_with_prop_instance,
agent_has_item_equipped,
map_get_land_position_around_position,
map_get_water_position_around_position,
is_zoom_disabled,
is_currently_night,
store_random_party_of_template,
str_is_empty,
])
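# Operations that open a new block, increasing the indentation level used by
# print_operations_block.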
try_begin_operations = frozenset([
try_begin,
try_for_range,
try_for_range_backwards,
try_for_parties,
try_for_agents,
])
| bsd-3-clause | -1,899,764,248,712,151,800 | 83.137828 | 337 | 0.566457 | false |
pahaz/fabtools | fabtools/require/users.py | 1 | 1964 | """
System users
============
"""
from fabtools.files import is_file
from fabtools.user import *
import fabtools.require
def user(name, comment=None, home=None, group=None, extra_groups=None,
create_home=False, skeleton_dir=None, password=None, system=False,
shell=None, uid=None):
"""
Require a user and its home directory.
::
from fabtools import require
# This will also create a home directory for alice
require.user('alice')
# Sometimes we don't need a home directory
require.user('mydaemon', create_home=False)
.. note:: This function can be accessed directly from the
``fabtools.require`` module for convenience.
"""
# Make sure the user exists
if not exists(name):
create(name, comment=comment, home=home, group=group,
extra_groups=extra_groups, create_home=create_home,
skeleton_dir=skeleton_dir, password=password, system=system,
shell=shell, uid=uid)
else:
modify(name, comment=comment, home=home, group=group,
extra_groups=extra_groups, password=password,
shell=shell, uid=uid)
# Make sure the home directory exists and is owned by user
if home:
fabtools.require.directory(home, owner=name, use_sudo=True)
def sudoer(username, hosts="ALL", operators="ALL", passwd=False, commands="ALL"):
"""
Require sudo permissions for a given user.
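    ::
        from fabtools import require
        # Allow alice to run any command as root without a password
        require.sudoer('alice')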
.. note:: This function can be accessed directly from the
``fabtools.require`` module for convenience.
"""
tags = "PASSWD:" if passwd else "NOPASSWD:"
spec = "%(username)s %(hosts)s=(%(operators)s) %(tags)s %(commands)s" % locals()
filename = '/etc/sudoers.d/fabtools-%s' % username
if is_file(filename):
sudo('chmod 0640 %(filename)s && rm -f %(filename)s' % locals())
sudo('echo "%(spec)s" >%(filename)s && chmod 0440 %(filename)s' % locals(), shell=True)
| bsd-2-clause | 5,924,842,653,718,361,000 | 31.196721 | 91 | 0.635947 | false |
liuzzfnst/tp-libvirt | libvirt/tests/src/virsh_cmd/domain/virsh_setmem.py | 7 | 14922 | import re
import os
import logging
import time
from autotest.client.shared import error
from virttest import virsh
from virttest import utils_libvirtd
from virttest import data_dir
from virttest.utils_test import libvirt
from virttest import utils_misc
from virttest.libvirt_xml import vm_xml
def manipulate_domain(vm_name, action, recover=False):
"""
Save/managedsave/S3/S4 domain or recover it.
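    :param action: one of "save", "managedsave", "s3" or "s4".
    :param recover: when True, undo the earlier manipulation instead of performing it.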
"""
tmp_dir = data_dir.get_tmp_dir()
save_file = os.path.join(tmp_dir, vm_name + ".save")
if not recover:
if action == "save":
save_option = ""
result = virsh.save(vm_name, save_file, save_option,
ignore_status=True, debug=True)
libvirt.check_exit_status(result)
elif action == "managedsave":
managedsave_option = ""
result = virsh.managedsave(vm_name, managedsave_option,
ignore_status=True, debug=True)
libvirt.check_exit_status(result)
elif action == "s3":
suspend_target = "mem"
result = virsh.dompmsuspend(vm_name, suspend_target,
ignore_status=True, debug=True)
libvirt.check_exit_status(result)
elif action == "s4":
suspend_target = "disk"
result = virsh.dompmsuspend(vm_name, suspend_target,
ignore_status=True, debug=True)
libvirt.check_exit_status(result)
# Wait domain state change: 'in shutdown' -> 'shut off'
utils_misc.wait_for(lambda: virsh.is_dead(vm_name), 5)
else:
logging.debug("No operation for the domain")
else:
if action == "save":
if os.path.exists(save_file):
result = virsh.restore(save_file, ignore_status=True, debug=True)
libvirt.check_exit_status(result)
os.remove(save_file)
else:
raise error.TestError("No save file for domain restore")
elif action in ["managedsave", "s4"]:
result = virsh.start(vm_name, ignore_status=True, debug=True)
libvirt.check_exit_status(result)
elif action == "s3":
suspend_target = "mem"
result = virsh.dompmwakeup(vm_name, ignore_status=True, debug=True)
libvirt.check_exit_status(result)
else:
logging.debug("No need recover the domain")
def run(test, params, env):
"""
Test command: virsh setmem.
    1) Prepare vm environment.
    2) Handle params.
    3) Prepare libvirtd status.
    4) Run test command and wait for the current memory to stabilize.
    5) Recover environment.
    6) Check result.
"""
def vm_proc_meminfo(session):
"""
Get guest total memory
"""
proc_meminfo = session.cmd_output("cat /proc/meminfo")
# verify format and units are expected
return int(re.search(r'MemTotal:\s+(\d+)\s+[kK]B', proc_meminfo).group(1))
def make_domref(domarg, vm_ref, domid, vm_name, domuuid):
"""
Create domain options of command
"""
# Specify domain as argument or parameter
if domarg == "yes":
dom_darg_key = "domainarg"
else:
dom_darg_key = "domain"
# How to reference domain
if vm_ref == "domid":
dom_darg_value = domid
elif vm_ref == "domname":
dom_darg_value = vm_name
elif vm_ref == "domuuid":
dom_darg_value = domuuid
elif vm_ref == "none":
dom_darg_value = None
elif vm_ref == "emptystring":
dom_darg_value = '""'
else: # stick in value directly
dom_darg_value = vm_ref
return {dom_darg_key: dom_darg_value}
def make_sizeref(sizearg, mem_ref, original_mem):
"""
Create size options of command
"""
if sizearg == "yes":
size_darg_key = "sizearg"
else:
size_darg_key = "size"
if mem_ref == "halfless":
size_darg_value = "%d" % (original_mem / 2)
elif mem_ref == "halfmore":
size_darg_value = "%d" % int(original_mem * 1.5) # no fraction
elif mem_ref == "same":
size_darg_value = "%d" % original_mem
elif mem_ref == "emptystring":
size_darg_value = '""'
elif mem_ref == "zero":
size_darg_value = "0"
elif mem_ref == "toosmall":
size_darg_value = "1024"
elif mem_ref == "toobig":
size_darg_value = "1099511627776" # (KiB) One Petabyte
elif mem_ref == "none":
size_darg_value = None
else: # stick in value directly
size_darg_value = mem_ref
return {size_darg_key: size_darg_value}
def cal_deviation(actual, expected):
"""
Calculate deviation of actual result and expected result
"""
numerator = float(actual)
denominator = float(expected)
if numerator > denominator:
numerator = denominator
denominator = float(actual)
return 100 - (100 * (numerator / denominator))
def is_old_libvirt():
"""
Check if libvirt is old version
"""
regex = r'\s+\[--size\]\s+'
return bool(not virsh.has_command_help_match('setmem', regex))
def print_debug_stats(original_inside_mem, original_outside_mem,
test_inside_mem, test_outside_mem,
expected_mem, delta_percentage):
"""
Print debug message for test
"""
# Calculate deviation
inside_deviation = cal_deviation(test_inside_mem, expected_mem)
outside_deviation = cal_deviation(test_outside_mem, expected_mem)
dbgmsg = ("Original inside mem : %d KiB\n"
"Expected inside mem : %d KiB\n"
"Actual inside mem : %d KiB\n"
"Inside mem deviation : %0.2f%%\n"
"Original outside mem : %d KiB\n"
"Expected outside mem : %d KiB\n"
"Actual outside mem : %d KiB\n"
"Outside mem deviation: %0.2f%%\n"
"Acceptable deviation %0.2f%%" % (
original_inside_mem,
expected_mem,
test_inside_mem,
inside_deviation,
original_outside_mem,
expected_mem,
test_outside_mem,
outside_deviation,
delta_percentage))
for dbgline in dbgmsg.splitlines():
logging.debug(dbgline)
# MAIN TEST CODE ###
# Process cartesian parameters
vm_ref = params.get("setmem_vm_ref", "")
mem_ref = params.get("setmem_mem_ref", "")
flags = params.get("setmem_flags", "")
status_error = "yes" == params.get("status_error", "no")
old_libvirt_fail = "yes" == params.get("setmem_old_libvirt_fail", "no")
quiesce_delay = int(params.get("setmem_quiesce_delay", "1"))
domarg = params.get("setmem_domarg", "no")
sizearg = params.get("setmem_sizearg", "no")
libvirt = params.get("libvirt", "on")
delta_percentage = float(params.get("setmem_delta_per", "10"))
start_vm = "yes" == params.get("start_vm", "yes")
vm_name = params.get("main_vm", "virt-tests-vm1")
paused_after_start_vm = "yes" == params.get("paused_after_start_vm", "no")
manipulate_dom_before_setmem = "yes" == params.get(
"manipulate_dom_before_setmem", "no")
manipulate_dom_after_setmem = "yes" == params.get(
"manipulate_dom_after_setmem", "no")
manipulate_action = params.get("manipulate_action", "")
vm = env.get_vm(vm_name)
# Back up domain XML
vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
backup_xml = vmxml.copy()
vmosxml = vmxml.os
need_mkswap = False
if manipulate_action in ['s3', 's4']:
vm.destroy()
BIOS_BIN = "/usr/share/seabios/bios.bin"
if os.path.isfile(BIOS_BIN):
vmosxml.loader = BIOS_BIN
vmxml.os = vmosxml
vmxml.sync()
else:
logging.error("Not find %s on host", BIOS_BIN)
vmxml.set_pm_suspend(vm_name, "yes", "yes")
vm.prepare_guest_agent()
if manipulate_action == "s4":
need_mkswap = not vm.has_swap()
if need_mkswap:
logging.debug("Creating swap partition")
vm.create_swap_partition()
memballoon_model = params.get("memballoon_model", "")
if memballoon_model:
vm.destroy()
vmxml.del_device('memballoon', by_tag=True)
memballoon_xml = vmxml.get_device_class('memballoon')()
memballoon_xml.model = memballoon_model
vmxml.add_device(memballoon_xml)
logging.info(memballoon_xml)
vmxml.sync()
vm.start()
remove_balloon_driver = "yes" == params.get("remove_balloon_driver", "no")
if remove_balloon_driver:
if not vm.is_alive():
logging.error("Can't remove module as guest not running")
else:
session = vm.wait_for_login()
cmd = "rmmod virtio_balloon"
s_rmmod, o_rmmod = session.cmd_status_output(cmd)
if s_rmmod != 0:
logging.error("Fail to remove module virtio_balloon in guest:\n%s",
o_rmmod)
session.close()
if start_vm:
if not vm.is_alive():
vm.start()
if paused_after_start_vm:
vm.resume()
session = vm.wait_for_login()
original_inside_mem = vm_proc_meminfo(session)
session.close()
if paused_after_start_vm:
vm.pause()
original_outside_mem = vm.get_used_mem()
else:
if vm.is_alive():
vm.destroy()
# Retrieve known mem value, convert into kilobytes
original_inside_mem = int(params.get("mem", "1024")) * 1024
original_outside_mem = original_inside_mem
domid = vm.get_id()
domuuid = vm.get_uuid()
uri = vm.connect_uri
old_libvirt = is_old_libvirt()
if old_libvirt:
logging.info("Running test on older libvirt")
use_kilobytes = True
else:
logging.info("Running test on newer libvirt")
use_kilobytes = False
# Argument pattern is complex, build with dargs
dargs = {'flagstr': flags,
'use_kilobytes': use_kilobytes,
'uri': uri, 'ignore_status': True, "debug": True}
dargs.update(make_domref(domarg, vm_ref, domid, vm_name, domuuid))
dargs.update(make_sizeref(sizearg, mem_ref, original_outside_mem))
# Prepare libvirtd status
libvirtd = utils_libvirtd.Libvirtd()
if libvirt == "off":
libvirtd.stop()
else:
if not libvirtd.is_running():
libvirtd.start()
if status_error or (old_libvirt_fail & old_libvirt):
logging.info("Error Test: Expecting an error to occur!")
try:
memory_change = True
if manipulate_dom_before_setmem:
manipulate_domain(vm_name, manipulate_action)
if manipulate_action in ['save', 'managedsave', 's4']:
memory_change = False
result = virsh.setmem(**dargs)
status = result.exit_status
        if status == 0:
logging.info(
"Waiting %d seconds for VM memory to settle", quiesce_delay)
            # It takes time for the kernel to settle on the new memory amount,
            # and the number of currently clean pages is not predictable, so
            # quiescence is extremely difficult to determine. Sleeping one
            # second per percent of allowed error is a reasonable option.
time.sleep(quiesce_delay)
if manipulate_dom_before_setmem:
manipulate_domain(vm_name, manipulate_action, True)
if manipulate_dom_after_setmem:
manipulate_domain(vm_name, manipulate_action)
manipulate_domain(vm_name, manipulate_action, True)
# Recover libvirtd status
if libvirt == "off":
libvirtd.start()
# Gather stats if not running error test
if not status_error and not old_libvirt_fail:
if not memory_change:
test_inside_mem = original_inside_mem
test_outside_mem = original_outside_mem
else:
if vm.state() == "shut off":
vm.start()
# Make sure it's never paused
vm.resume()
session = vm.wait_for_login()
# Actual results
test_inside_mem = vm_proc_meminfo(session)
session.close()
test_outside_mem = vm.get_used_mem()
# Expected results for both inside and outside
if remove_balloon_driver:
expected_mem = original_outside_mem
else:
if not memory_change:
expected_mem = original_inside_mem
elif sizearg == "yes":
expected_mem = int(dargs["sizearg"])
else:
expected_mem = int(dargs["size"])
print_debug_stats(original_inside_mem, original_outside_mem,
test_inside_mem, test_outside_mem,
expected_mem, delta_percentage)
# Don't care about memory comparison on error test
outside_pass = cal_deviation(test_outside_mem,
expected_mem) <= delta_percentage
inside_pass = cal_deviation(test_inside_mem,
expected_mem) <= delta_percentage
            if status != 0 or not outside_pass or not inside_pass:
msg = "test conditions not met: "
                if status != 0:
msg += "Non-zero virsh setmem exit code. "
if not outside_pass:
msg += "Outside memory deviated. "
if not inside_pass:
msg += "Inside memory deviated. "
raise error.TestFail(msg)
return # Normal test passed
elif not status_error and old_libvirt_fail:
            if status == 0:
if old_libvirt:
raise error.TestFail("Error test did not result in an error")
else:
if not old_libvirt:
raise error.TestFail("Newer libvirt failed when it should not")
else: # Verify an error test resulted in error
            if status == 0:
raise error.TestFail("Error test did not result in an error")
finally:
if need_mkswap:
vm.cleanup_swap()
vm.destroy()
backup_xml.sync()
| gpl-2.0 | 420,709,355,759,119,940 | 36.681818 | 83 | 0.548184 | false |
mrquim/repository.mrquim | plugin.video.poseidon/resources/lib/modules/tvmaze.py | 4 | 4183 | # -*- coding: utf-8 -*-
'''
Poseidon Add-on
Copyright (C) 2016 Poseidon
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib,json
from resources.lib.modules import cache
from resources.lib.modules import client
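# Thin client for the TVmaze REST API (http://api.tvmaze.com); responses go
# through the add-on's cache module (cache.get with a 24-hour window).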
class tvMaze:
def __init__(self, show_id = None):
self.api_url = 'http://api.tvmaze.com/%s%s'
self.show_id = show_id
def showID(self, show_id = None):
        if (show_id is not None):
self.show_id = show_id
return show_id
return self.show_id
def request(self, endpoint, query = None):
try:
            # Encode the query parameters, if there are any
            if (query is not None):
query = '?' + urllib.urlencode(query)
else:
query = ''
# Make the request
request = self.api_url % (endpoint, query)
# Send the request and get the response
# Get the results from cache if available
response = cache.get(client.request, 24, request)
# Retrun the result as a dictionary
return json.loads(response)
except:
pass
return {}
def showLookup(self, type, id):
try:
result = self.request('lookup/shows', {type: id})
# Storing the show id locally
if ('id' in result):
self.show_id = result['id']
return result
except:
pass
return {}
def shows(self, show_id = None, embed = None):
try:
if (not self.showID(show_id)):
raise Exception()
            result = self.request('shows/%d' % int(self.show_id))
# Storing the show id locally
if ('id' in result):
self.show_id = result['id']
return result
except:
pass
return {}
def showSeasons(self, show_id = None):
try:
if (not self.showID(show_id)):
raise Exception()
            result = self.request('shows/%d/seasons' % int(self.show_id))
if (len(result) > 0 and 'id' in result[0]):
return result
except:
pass
return []
def showSeasonList(self, show_id):
return {}
def showEpisodeList(self, show_id = None, specials = False):
try:
if (not self.showID(show_id)):
raise Exception()
            result = self.request('shows/%d/episodes' % int(self.show_id), {'specials': '1'} if specials else None)
if (len(result) > 0 and 'id' in result[0]):
return result
except:
pass
return []
def episodeAbsoluteNumber(self, thetvdb, season, episode):
try:
url = 'http://thetvdb.com/api/%s/series/%s/default/%01d/%01d' % ('MUQ2MkYyRjkwMDMwQzQ0NA=='.decode('base64'), thetvdb, int(season), int(episode))
r = client.request(url)
episode = client.parseDOM(r, 'absolute_number')[0]
            return int(episode)
except:
pass
return episode
def getTVShowTranslation(self, thetvdb, lang):
try:
url = 'http://thetvdb.com/api/%s/series/%s/%s.xml' % ('MUQ2MkYyRjkwMDMwQzQ0NA=='.decode('base64'), thetvdb, lang)
r = client.request(url)
title = client.parseDOM(r, 'SeriesName')[0]
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
return title
except:
pass
| gpl-2.0 | 7,305,247,987,600,014,000 | 25.643312 | 157 | 0.549606 | false |
frankvdp/django | django/utils/ipv6.py | 129 | 1350 | import ipaddress
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def clean_ipv6_address(ip_str, unpack_ipv4=False,
error_message=_("This is not a valid IPv6 address.")):
"""
Clean an IPv6 address string.
Raise ValidationError if the address is invalid.
Replace the longest continuous zero-sequence with "::", remove leading
zeroes, and make sure all hextets are lowercase.
Args:
ip_str: A valid IPv6 address.
unpack_ipv4: if an IPv4-mapped address is found,
return the plain IPv4 address (default=False).
error_message: An error message used in the ValidationError.
Return a compressed IPv6 address or the same value.
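    For example, '2001:0::0:01' is normalized to '2001::1', and with
    unpack_ipv4=True an IPv4-mapped address such as '::ffff:192.0.2.1' is
    returned as '192.0.2.1'.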
"""
try:
addr = ipaddress.IPv6Address(int(ipaddress.IPv6Address(ip_str)))
except ValueError:
raise ValidationError(error_message, code='invalid')
if unpack_ipv4 and addr.ipv4_mapped:
return str(addr.ipv4_mapped)
elif addr.ipv4_mapped:
return '::ffff:%s' % str(addr.ipv4_mapped)
return str(addr)
def is_valid_ipv6_address(ip_str):
"""
Return whether or not the `ip_str` string is a valid IPv6 address.
"""
try:
ipaddress.IPv6Address(ip_str)
except ValueError:
return False
return True
| bsd-3-clause | 4,686,428,403,614,969,000 | 28.347826 | 77 | 0.668148 | false |
noba3/KoTos | addons/script.module.youtube.dl/lib/youtube_dl/extractor/tagesschau.py | 102 | 6530 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import parse_filesize
class TagesschauIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?tagesschau\.de/multimedia/(?:[^/]+/)*?[^/#?]+?(?P<id>-?[0-9]+)(?:~_[^/#?]+?)?\.html'
_TESTS = [{
'url': 'http://www.tagesschau.de/multimedia/video/video-102143.html',
'md5': '917a228bc7df7850783bc47979673a09',
'info_dict': {
'id': '102143',
'ext': 'mp4',
'title': 'Regierungsumbildung in Athen: Neue Minister in Griechenland vereidigt',
'description': 'md5:171feccd9d9b3dd54d05d501568f6359',
'thumbnail': 're:^https?:.*\.jpg$',
},
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/ts-5727.html',
'md5': '3c54c1f6243d279b706bde660ceec633',
'info_dict': {
'id': '5727',
'ext': 'mp4',
'description': 'md5:695c01bfd98b7e313c501386327aea59',
'title': 'Sendung: tagesschau \t04.12.2014 20:00 Uhr',
'thumbnail': 're:^https?:.*\.jpg$',
},
}, {
'url': 'http://www.tagesschau.de/multimedia/politikimradio/audio-18407.html',
'md5': 'aef45de271c4bf0a5db834aa40bf774c',
'info_dict': {
'id': '18407',
'ext': 'mp3',
'title': 'Flüchtlingsdebatte: Hitzig, aber wenig hilfreich',
'description': 'Flüchtlingsdebatte: Hitzig, aber wenig hilfreich',
'thumbnail': 're:^https?:.*\.jpg$',
},
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/tsg-3771.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/tt-3827.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/nm-3475.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/weltspiegel-3167.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/tsvorzwanzig-959.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/sendung/bab/bab-3299~_bab-sendung-209.html',
'only_matching': True,
}, {
'url': 'http://www.tagesschau.de/multimedia/video/video-102303~_bab-sendung-211.html',
'only_matching': True,
}]
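    # Fixed resolutions used by the tagesschau player, keyed by the
    # one-letter quality code embedded in the media URL.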
_FORMATS = {
's': {'width': 256, 'height': 144, 'quality': 1},
'm': {'width': 512, 'height': 288, 'quality': 2},
'l': {'width': 960, 'height': 544, 'quality': 3},
}
def _real_extract(self, url):
video_id = self._match_id(url)
display_id = video_id.lstrip('-')
webpage = self._download_webpage(url, display_id)
player_url = self._html_search_meta(
'twitter:player', webpage, 'player URL', default=None)
if player_url:
playerpage = self._download_webpage(
player_url, display_id, 'Downloading player page')
formats = []
for media in re.finditer(
r'''(?x)
(?P<q_url>["\'])(?P<url>http://media.+?)(?P=q_url)
,\s*type:(?P<q_type>["\'])(?P<type>video|audio)/(?P<ext>.+?)(?P=q_type)
(?:,\s*quality:(?P<q_quality>["\'])(?P<quality>.+?)(?P=q_quality))?
''', playerpage):
url = media.group('url')
type_ = media.group('type')
ext = media.group('ext')
res = media.group('quality')
f = {
'format_id': '%s_%s' % (res, ext) if res else ext,
'url': url,
'ext': ext,
'vcodec': 'none' if type_ == 'audio' else None,
}
f.update(self._FORMATS.get(res, {}))
formats.append(f)
thumbnail = self._og_search_thumbnail(playerpage)
title = self._og_search_title(webpage).strip()
description = self._og_search_description(webpage).strip()
else:
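            # No embedded player: fall back to scraping the download links
            # that tagesschau.de offers below the article.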
download_text = self._search_regex(
r'(?s)<p>Wir bieten dieses Video in folgenden Formaten zum Download an:</p>\s*<div class="controls">(.*?)</div>\s*<p>',
webpage, 'download links')
links = re.finditer(
r'<div class="button" title="(?P<title>[^"]*)"><a href="(?P<url>[^"]+)">(?P<name>.+?)</a></div>',
download_text)
formats = []
for l in links:
format_id = self._search_regex(
r'.*/[^/.]+\.([^/]+)\.[^/.]+', l.group('url'), 'format ID')
format = {
'format_id': format_id,
'url': l.group('url'),
'format_name': l.group('name'),
}
m = re.match(
r'''(?x)
Video:\s*(?P<vcodec>[a-zA-Z0-9/._-]+)\s*&\#10;
(?P<width>[0-9]+)x(?P<height>[0-9]+)px&\#10;
(?P<vbr>[0-9]+)kbps&\#10;
Audio:\s*(?P<abr>[0-9]+)kbps,\s*(?P<audio_desc>[A-Za-z\.0-9]+)&\#10;
Größe:\s*(?P<filesize_approx>[0-9.,]+\s+[a-zA-Z]*B)''',
l.group('title'))
if m:
format.update({
'format_note': m.group('audio_desc'),
'vcodec': m.group('vcodec'),
'width': int(m.group('width')),
'height': int(m.group('height')),
'abr': int(m.group('abr')),
'vbr': int(m.group('vbr')),
'filesize_approx': parse_filesize(m.group('filesize_approx')),
})
formats.append(format)
thumbnail = self._og_search_thumbnail(webpage)
description = self._html_search_regex(
r'(?s)<p class="teasertext">(.*?)</p>',
webpage, 'description', default=None)
title = self._html_search_regex(
r'<span class="headline".*?>(.*?)</span>', webpage, 'title')
self._sort_formats(formats)
return {
'id': display_id,
'title': title,
'thumbnail': thumbnail,
'formats': formats,
'description': description,
}
| gpl-2.0 | 7,904,568,288,801,026,000 | 41.116129 | 135 | 0.469516 | false |
hackerbot/DjangoDev | django/forms/fields.py | 34 | 48292 | """
Field classes.
"""
from __future__ import unicode_literals
import copy
import datetime
import os
import re
import sys
import uuid
import warnings
from decimal import Decimal, DecimalException
from io import BytesIO
from django.core import validators
from django.core.exceptions import ValidationError
# Provide this import for backwards compatibility.
from django.core.validators import EMPTY_VALUES # NOQA
from django.forms.utils import from_current_timezone, to_current_timezone
from django.forms.widgets import (
FILE_INPUT_CONTRADICTION, CheckboxInput, ClearableFileInput, DateInput,
DateTimeInput, EmailInput, HiddenInput, MultipleHiddenInput,
NullBooleanSelect, NumberInput, Select, SelectMultiple,
SplitDateTimeWidget, SplitHiddenDateTimeWidget, TextInput, TimeInput,
URLInput,
)
from django.utils import formats, six
from django.utils.dateparse import parse_duration
from django.utils.deprecation import (
RemovedInDjango20Warning, RenameMethodsBase,
)
from django.utils.duration import duration_string
from django.utils.encoding import force_str, force_text, smart_text
from django.utils.ipv6 import clean_ipv6_address
from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
__all__ = (
'Field', 'CharField', 'IntegerField',
'DateField', 'TimeField', 'DateTimeField', 'DurationField',
'RegexField', 'EmailField', 'FileField', 'ImageField', 'URLField',
'BooleanField', 'NullBooleanField', 'ChoiceField', 'MultipleChoiceField',
'ComboField', 'MultiValueField', 'FloatField', 'DecimalField',
'SplitDateTimeField', 'GenericIPAddressField', 'FilePathField',
'SlugField', 'TypedChoiceField', 'TypedMultipleChoiceField', 'UUIDField',
)
class RenameFieldMethods(RenameMethodsBase):
renamed_methods = (
('_has_changed', 'has_changed', RemovedInDjango20Warning),
)
class Field(six.with_metaclass(RenameFieldMethods, object)):
widget = TextInput # Default widget to use when rendering this type of Field.
hidden_widget = HiddenInput # Default widget to use when rendering this as "hidden".
default_validators = [] # Default set of validators
# Add an 'invalid' entry to default_error_message if you want a specific
# field error message not raised by the field validators.
default_error_messages = {
'required': _('This field is required.'),
}
empty_values = list(validators.EMPTY_VALUES)
# Tracks each time a Field instance is created. Used to retain order.
creation_counter = 0
def __init__(self, required=True, widget=None, label=None, initial=None,
help_text='', error_messages=None, show_hidden_initial=False,
validators=[], localize=False, label_suffix=None):
# required -- Boolean that specifies whether the field is required.
# True by default.
# widget -- A Widget class, or instance of a Widget class, that should
# be used for this Field when displaying it. Each Field has a
# default Widget that it'll use if you don't specify this. In
# most cases, the default widget is TextInput.
# label -- A verbose name for this field, for use in displaying this
# field in a form. By default, Django will use a "pretty"
# version of the form field name, if the Field is part of a
# Form.
# initial -- A value to use in this Field's initial display. This value
# is *not* used as a fallback if data isn't given.
# help_text -- An optional string to use as "help text" for this Field.
# error_messages -- An optional dictionary to override the default
# messages that the field will raise.
# show_hidden_initial -- Boolean that specifies if it is needed to render a
# hidden widget with initial value after widget.
# validators -- List of additional validators to use
# localize -- Boolean that specifies if the field should be localized.
# label_suffix -- Suffix to be added to the label. Overrides
# form's label_suffix.
self.required, self.label, self.initial = required, label, initial
self.show_hidden_initial = show_hidden_initial
self.help_text = help_text
self.label_suffix = label_suffix
widget = widget or self.widget
if isinstance(widget, type):
widget = widget()
# Trigger the localization machinery if needed.
self.localize = localize
if self.localize:
widget.is_localized = True
# Let the widget know whether it should display as required.
widget.is_required = self.required
# Hook into self.widget_attrs() for any Field-specific HTML attributes.
extra_attrs = self.widget_attrs(widget)
if extra_attrs:
widget.attrs.update(extra_attrs)
self.widget = widget
# Increase the creation counter, and save our local copy.
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self.error_messages = messages
self.validators = self.default_validators + validators
super(Field, self).__init__()
def prepare_value(self, value):
return value
def to_python(self, value):
return value
def validate(self, value):
if value in self.empty_values and self.required:
raise ValidationError(self.error_messages['required'], code='required')
def run_validators(self, value):
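        # Run every validator and aggregate their errors so that all
        # problems are reported at once rather than stopping at the first.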
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except ValidationError as e:
if hasattr(e, 'code') and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise ValidationError(errors)
def clean(self, value):
"""
Validates the given value and returns its "cleaned" value as an
appropriate Python object.
Raises ValidationError for any errors.
"""
value = self.to_python(value)
self.validate(value)
self.run_validators(value)
return value
def bound_data(self, data, initial):
"""
Return the value that should be shown for this field on render of a
bound form, given the submitted POST data for the field and the initial
data, if any.
For most fields, this will simply be data; FileFields need to handle it
a bit differently.
"""
return data
def widget_attrs(self, widget):
"""
Given a Widget instance (*not* a Widget class), returns a dictionary of
any HTML attributes that should be added to the Widget, based on this
Field.
"""
return {}
def has_changed(self, initial, data):
"""
Return True if data differs from initial.
"""
# For purposes of seeing whether something has changed, None is
# the same as an empty string, if the data or initial value we get
# is None, replace it w/ ''.
initial_value = initial if initial is not None else ''
try:
data = self.to_python(data)
if hasattr(self, '_coerce'):
data = self._coerce(data)
initial_value = self._coerce(initial_value)
except ValidationError:
return True
data_value = data if data is not None else ''
return initial_value != data_value
def __deepcopy__(self, memo):
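        # Copy the field shallowly, but deep-copy the widget (its attrs dict
        # is mutable) and take a fresh copy of the validators list.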
result = copy.copy(self)
memo[id(self)] = result
result.widget = copy.deepcopy(self.widget, memo)
result.validators = self.validators[:]
return result
class CharField(Field):
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
self.max_length, self.min_length = max_length, min_length
super(CharField, self).__init__(*args, **kwargs)
if min_length is not None:
self.validators.append(validators.MinLengthValidator(int(min_length)))
if max_length is not None:
self.validators.append(validators.MaxLengthValidator(int(max_length)))
def to_python(self, value):
"Returns a Unicode object."
if value in self.empty_values:
return ''
return smart_text(value)
def widget_attrs(self, widget):
attrs = super(CharField, self).widget_attrs(widget)
if self.max_length is not None:
# The HTML attribute is maxlength, not max_length.
attrs.update({'maxlength': str(self.max_length)})
return attrs
class IntegerField(Field):
widget = NumberInput
default_error_messages = {
'invalid': _('Enter a whole number.'),
}
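    # Matches a trailing ".0", ".00", ... so that e.g. "1.0" is accepted as
    # the integer 1.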
re_decimal = re.compile(r'\.0*\s*$')
def __init__(self, max_value=None, min_value=None, *args, **kwargs):
self.max_value, self.min_value = max_value, min_value
if kwargs.get('localize') and self.widget == NumberInput:
# Localized number input is not well supported on most browsers
kwargs.setdefault('widget', super(IntegerField, self).widget)
super(IntegerField, self).__init__(*args, **kwargs)
if max_value is not None:
self.validators.append(validators.MaxValueValidator(max_value))
if min_value is not None:
self.validators.append(validators.MinValueValidator(min_value))
def to_python(self, value):
"""
Validates that int() can be called on the input. Returns the result
of int(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
# Strip trailing decimal and zeros.
try:
value = int(self.re_decimal.sub('', str(value)))
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def widget_attrs(self, widget):
attrs = super(IntegerField, self).widget_attrs(widget)
if isinstance(widget, NumberInput):
if self.min_value is not None:
attrs['min'] = self.min_value
if self.max_value is not None:
attrs['max'] = self.max_value
return attrs
class FloatField(IntegerField):
default_error_messages = {
'invalid': _('Enter a number.'),
}
def to_python(self, value):
"""
Validates that float() can be called on the input. Returns the result
of float(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = float(value)
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def validate(self, value):
super(FloatField, self).validate(value)
# Check for NaN (which is the only thing not equal to itself) and +/- infinity
if value != value or value in (Decimal('Inf'), Decimal('-Inf')):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def widget_attrs(self, widget):
attrs = super(FloatField, self).widget_attrs(widget)
if isinstance(widget, NumberInput) and 'step' not in widget.attrs:
attrs.setdefault('step', 'any')
return attrs
class DecimalField(IntegerField):
default_error_messages = {
'invalid': _('Enter a number.'),
'max_digits': ungettext_lazy(
'Ensure that there are no more than %(max)s digit in total.',
'Ensure that there are no more than %(max)s digits in total.',
'max'),
'max_decimal_places': ungettext_lazy(
'Ensure that there are no more than %(max)s decimal place.',
'Ensure that there are no more than %(max)s decimal places.',
'max'),
'max_whole_digits': ungettext_lazy(
'Ensure that there are no more than %(max)s digit before the decimal point.',
'Ensure that there are no more than %(max)s digits before the decimal point.',
'max'),
}
def __init__(self, max_value=None, min_value=None, max_digits=None, decimal_places=None, *args, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
super(DecimalField, self).__init__(max_value, min_value, *args, **kwargs)
def to_python(self, value):
"""
Validates that the input is a decimal number. Returns a Decimal
instance. Returns None for empty values. Ensures that there are no more
than max_digits in the number, and no more than decimal_places digits
after the decimal point.
"""
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
value = smart_text(value).strip()
try:
value = Decimal(value)
except DecimalException:
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def validate(self, value):
super(DecimalField, self).validate(value)
if value in self.empty_values:
return
# Check for NaN, Inf and -Inf values. We can't compare directly for NaN,
# since it is never equal to itself. However, NaN is the only value that
# isn't equal to itself, so we can use this to identify NaN
if value != value or value == Decimal("Inf") or value == Decimal("-Inf"):
raise ValidationError(self.error_messages['invalid'], code='invalid')
sign, digittuple, exponent = value.as_tuple()
decimals = abs(exponent)
# digittuple doesn't include any leading zeros.
digits = len(digittuple)
if decimals > digits:
# We have leading zeros up to or past the decimal point. Count
# everything past the decimal point as a digit. We do not count
# 0 before the decimal point as a digit since that would mean
# we would not allow max_digits = decimal_places.
digits = decimals
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.error_messages['max_digits'],
code='max_digits',
params={'max': self.max_digits},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.error_messages['max_decimal_places'],
code='max_decimal_places',
params={'max': self.decimal_places},
)
if (self.max_digits is not None and self.decimal_places is not None
and whole_digits > (self.max_digits - self.decimal_places)):
raise ValidationError(
self.error_messages['max_whole_digits'],
code='max_whole_digits',
params={'max': (self.max_digits - self.decimal_places)},
)
return value
def widget_attrs(self, widget):
attrs = super(DecimalField, self).widget_attrs(widget)
if isinstance(widget, NumberInput) and 'step' not in widget.attrs:
if self.decimal_places is not None:
# Use exponential notation for small values since they might
# be parsed as 0 otherwise. ref #20765
step = str(Decimal('1') / 10 ** self.decimal_places).lower()
else:
step = 'any'
attrs.setdefault('step', step)
return attrs
class BaseTemporalField(Field):
def __init__(self, input_formats=None, *args, **kwargs):
super(BaseTemporalField, self).__init__(*args, **kwargs)
if input_formats is not None:
self.input_formats = input_formats
def to_python(self, value):
# Try to coerce the value to unicode.
unicode_value = force_text(value, strings_only=True)
if isinstance(unicode_value, six.text_type):
value = unicode_value.strip()
# If unicode, try to strptime against each input format.
if isinstance(value, six.text_type):
for format in self.input_formats:
try:
return self.strptime(value, format)
except (ValueError, TypeError):
continue
raise ValidationError(self.error_messages['invalid'], code='invalid')
def strptime(self, value, format):
raise NotImplementedError('Subclasses must define this method.')
class DateField(BaseTemporalField):
widget = DateInput
input_formats = formats.get_format_lazy('DATE_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date.'),
}
def to_python(self, value):
"""
Validates that the input can be converted to a date. Returns a Python
datetime.date object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return value.date()
if isinstance(value, datetime.date):
return value
return super(DateField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format).date()
class TimeField(BaseTemporalField):
widget = TimeInput
input_formats = formats.get_format_lazy('TIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid time.')
}
def to_python(self, value):
"""
Validates that the input can be converted to a time. Returns a Python
datetime.time object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.time):
return value
return super(TimeField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format).time()
class DateTimeField(BaseTemporalField):
widget = DateTimeInput
input_formats = formats.get_format_lazy('DATETIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date/time.'),
}
def prepare_value(self, value):
if isinstance(value, datetime.datetime):
value = to_current_timezone(value)
return value
def to_python(self, value):
"""
Validates that the input can be converted to a datetime. Returns a
Python datetime.datetime object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return from_current_timezone(value)
if isinstance(value, datetime.date):
result = datetime.datetime(value.year, value.month, value.day)
return from_current_timezone(result)
result = super(DateTimeField, self).to_python(value)
return from_current_timezone(result)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format)
class DurationField(Field):
default_error_messages = {
'invalid': _('Enter a valid duration.'),
}
def prepare_value(self, value):
if isinstance(value, datetime.timedelta):
return duration_string(value)
return value
def to_python(self, value):
if value in self.empty_values:
return None
if isinstance(value, datetime.timedelta):
return value
value = parse_duration(value)
if value is None:
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
class RegexField(CharField):
def __init__(self, regex, max_length=None, min_length=None, error_message=None, *args, **kwargs):
"""
regex can be either a string or a compiled regular expression object.
error_message is an optional error message to use, if
'Enter a valid value' is too generic for you.
"""
# error_message is just kept for backwards compatibility:
if error_message is not None:
warnings.warn(
"The 'error_message' argument is deprecated. Use "
"Field.error_messages['invalid'] instead.",
RemovedInDjango20Warning, stacklevel=2
)
error_messages = kwargs.get('error_messages') or {}
error_messages['invalid'] = error_message
kwargs['error_messages'] = error_messages
super(RegexField, self).__init__(max_length, min_length, *args, **kwargs)
self._set_regex(regex)
def _get_regex(self):
return self._regex
def _set_regex(self, regex):
if isinstance(regex, six.string_types):
regex = re.compile(regex, re.UNICODE)
self._regex = regex
if hasattr(self, '_regex_validator') and self._regex_validator in self.validators:
self.validators.remove(self._regex_validator)
self._regex_validator = validators.RegexValidator(regex=regex)
self.validators.append(self._regex_validator)
regex = property(_get_regex, _set_regex)
class EmailField(CharField):
widget = EmailInput
default_validators = [validators.validate_email]
def clean(self, value):
value = self.to_python(value).strip()
return super(EmailField, self).clean(value)
class FileField(Field):
widget = ClearableFileInput
default_error_messages = {
'invalid': _("No file was submitted. Check the encoding type on the form."),
'missing': _("No file was submitted."),
'empty': _("The submitted file is empty."),
'max_length': ungettext_lazy(
'Ensure this filename has at most %(max)d character (it has %(length)d).',
'Ensure this filename has at most %(max)d characters (it has %(length)d).',
'max'),
'contradiction': _('Please either submit a file or check the clear checkbox, not both.')
}
def __init__(self, *args, **kwargs):
self.max_length = kwargs.pop('max_length', None)
self.allow_empty_file = kwargs.pop('allow_empty_file', False)
super(FileField, self).__init__(*args, **kwargs)
def to_python(self, data):
if data in self.empty_values:
return None
# UploadedFile objects should have name and size attributes.
try:
file_name = data.name
file_size = data.size
except AttributeError:
raise ValidationError(self.error_messages['invalid'], code='invalid')
if self.max_length is not None and len(file_name) > self.max_length:
params = {'max': self.max_length, 'length': len(file_name)}
raise ValidationError(self.error_messages['max_length'], code='max_length', params=params)
if not file_name:
raise ValidationError(self.error_messages['invalid'], code='invalid')
if not self.allow_empty_file and not file_size:
raise ValidationError(self.error_messages['empty'], code='empty')
return data
def clean(self, data, initial=None):
# If the widget got contradictory inputs, we raise a validation error
if data is FILE_INPUT_CONTRADICTION:
raise ValidationError(self.error_messages['contradiction'], code='contradiction')
# False means the field value should be cleared; further validation is
# not needed.
if data is False:
if not self.required:
return False
# If the field is required, clearing is not possible (the widget
# shouldn't return False data in that case anyway). False is not
# in self.empty_value; if a False value makes it this far
# it should be validated from here on out as None (so it will be
# caught by the required check).
data = None
if not data and initial:
return initial
return super(FileField, self).clean(data)
def bound_data(self, data, initial):
if data in (None, FILE_INPUT_CONTRADICTION):
return initial
return data
def has_changed(self, initial, data):
if data is None:
return False
return True
class ImageField(FileField):
default_error_messages = {
'invalid_image': _(
"Upload a valid image. The file you uploaded was either not an "
"image or a corrupted image."
),
}
def to_python(self, data):
"""
Checks that the file-upload field data contains a valid image (GIF, JPG,
PNG, possibly others -- whatever the Python Imaging Library supports).
"""
f = super(ImageField, self).to_python(data)
if f is None:
return None
from PIL import Image
# We need to get a file object for Pillow. We might have a path or we might
# have to read the data into memory.
if hasattr(data, 'temporary_file_path'):
file = data.temporary_file_path()
else:
if hasattr(data, 'read'):
file = BytesIO(data.read())
else:
file = BytesIO(data['content'])
try:
# load() could spot a truncated JPEG, but it loads the entire
# image in memory, which is a DoS vector. See #3848 and #18520.
image = Image.open(file)
# verify() must be called immediately after the constructor.
image.verify()
# Annotating so subclasses can reuse it for their own validation
f.image = image
f.content_type = Image.MIME[image.format]
except Exception:
# Pillow doesn't recognize it as an image.
six.reraise(ValidationError, ValidationError(
self.error_messages['invalid_image'],
code='invalid_image',
), sys.exc_info()[2])
if hasattr(f, 'seek') and callable(f.seek):
f.seek(0)
return f
class URLField(CharField):
widget = URLInput
default_error_messages = {
'invalid': _('Enter a valid URL.'),
}
default_validators = [validators.URLValidator()]
def to_python(self, value):
def split_url(url):
"""
            Returns a list of url parts via ``urlparse.urlsplit`` (or raises a
            ``ValidationError`` for certain malformed URLs).
"""
try:
return list(urlsplit(url))
except ValueError:
# urlparse.urlsplit can raise a ValueError with some
# misformatted URLs.
raise ValidationError(self.error_messages['invalid'], code='invalid')
value = super(URLField, self).to_python(value)
if value:
url_fields = split_url(value)
if not url_fields[0]:
# If no URL scheme given, assume http://
url_fields[0] = 'http'
if not url_fields[1]:
# Assume that if no domain is provided, that the path segment
# contains the domain.
url_fields[1] = url_fields[2]
url_fields[2] = ''
# Rebuild the url_fields list, since the domain segment may now
# contain the path too.
url_fields = split_url(urlunsplit(url_fields))
value = urlunsplit(url_fields)
return value
def clean(self, value):
value = self.to_python(value).strip()
return super(URLField, self).clean(value)
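# --- Illustrative usage sketch (editor's addition, not Django source). It
# shows the scheme/domain normalization performed by to_python() above; the
# _demo name is hypothetical.
def _demo_url_normalization():
    f = URLField()
    # A scheme-less value gains an assumed http:// prefix before the
    # URLValidator runs.
    return f.clean('example.com')  # -> 'http://example.com'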
class BooleanField(Field):
widget = CheckboxInput
def to_python(self, value):
"""Returns a Python boolean object."""
# Explicitly check for the string 'False', which is what a hidden field
# will submit for False. Also check for '0', since this is what
# RadioSelect will provide. Because bool("True") == bool('1') == True,
# we don't need to handle that explicitly.
if isinstance(value, six.string_types) and value.lower() in ('false', '0'):
value = False
else:
value = bool(value)
return super(BooleanField, self).to_python(value)
def validate(self, value):
if not value and self.required:
raise ValidationError(self.error_messages['required'], code='required')
def has_changed(self, initial, data):
# Sometimes data or initial could be None or '' which should be the
# same thing as False.
if initial == 'False':
# show_hidden_initial may have transformed False to 'False'
initial = False
return bool(initial) != bool(data)
class NullBooleanField(BooleanField):
"""
A field whose valid values are None, True and False. Invalid values are
cleaned to None.
"""
widget = NullBooleanSelect
def to_python(self, value):
"""
Explicitly checks for the string 'True' and 'False', which is what a
hidden field will submit for True and False, for 'true' and 'false',
which are likely to be returned by JavaScript serializations of forms,
        and for '1' and '0', which is what a RadioSelect will submit. Unlike
        BooleanField, we need to check for True explicitly because we are
        not using the bool() function.
"""
if value in (True, 'True', 'true', '1'):
return True
elif value in (False, 'False', 'false', '0'):
return False
else:
return None
def validate(self, value):
pass
def has_changed(self, initial, data):
# None (unknown) and False (No) are not the same
if initial is not None:
initial = bool(initial)
if data is not None:
data = bool(data)
return initial != data
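# --- Illustrative usage sketch (editor's addition, not Django source). It
# contrasts the two boolean fields defined above; the _demo name and sample
# values are hypothetical.
def _demo_tristate_cleaning():
    # BooleanField collapses the strings 'False' and '0' to False, while
    # NullBooleanField keeps any unrecognized value as None ("unknown").
    assert BooleanField(required=False).clean('0') is False
    assert NullBooleanField().clean('maybe') is None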
class CallableChoiceIterator(object):
def __init__(self, choices_func):
self.choices_func = choices_func
def __iter__(self):
for e in self.choices_func():
yield e
class ChoiceField(Field):
widget = Select
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
}
def __init__(self, choices=(), required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
super(ChoiceField, self).__init__(required=required, widget=widget, label=label,
initial=initial, help_text=help_text, *args, **kwargs)
self.choices = choices
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
result._choices = copy.deepcopy(self._choices, memo)
return result
def _get_choices(self):
return self._choices
def _set_choices(self, value):
# Setting choices also sets the choices on the widget.
# choices can be any iterable, but we call list() on it because
# it will be consumed more than once.
if callable(value):
value = CallableChoiceIterator(value)
else:
value = list(value)
self._choices = self.widget.choices = value
choices = property(_get_choices, _set_choices)
def to_python(self, value):
"Returns a Unicode object."
if value in self.empty_values:
return ''
return smart_text(value)
def validate(self, value):
"""
Validates that the input is in self.choices.
"""
super(ChoiceField, self).validate(value)
if value and not self.valid_value(value):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
def valid_value(self, value):
"Check to see if the provided value is a valid choice"
text_value = force_text(value)
for k, v in self.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == k2 or text_value == force_text(k2):
return True
else:
if value == k or text_value == force_text(k):
return True
return False
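# --- Illustrative usage sketch (editor's addition, not Django source). It
# shows valid_value() descending into an optgroup; the choices below are
# hypothetical.
def _demo_optgroup_choices():
    f = ChoiceField(choices=[('Audio', [('vinyl', 'Vinyl'), ('cd', 'CD')])])
    return f.valid_value('cd')  # -> True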
class TypedChoiceField(ChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', '')
super(TypedChoiceField, self).__init__(*args, **kwargs)
def _coerce(self, value):
"""
Validate that the value can be coerced to the right type (if not empty).
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
try:
value = self.coerce(value)
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
return value
def clean(self, value):
value = super(TypedChoiceField, self).clean(value)
return self._coerce(value)
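# --- Illustrative usage sketch (editor's addition, not Django source). It
# shows coercion of the submitted string back to the choice's native type;
# the choices below are hypothetical.
def _demo_typed_choice():
    f = TypedChoiceField(choices=[(1, '+1'), (-1, '-1')], coerce=int)
    return f.clean('1')  # -> 1, an int rather than the submitted str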
class MultipleChoiceField(ChoiceField):
hidden_widget = MultipleHiddenInput
widget = SelectMultiple
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
'invalid_list': _('Enter a list of values.'),
}
def to_python(self, value):
if not value:
return []
elif not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['invalid_list'], code='invalid_list')
return [smart_text(val) for val in value]
def validate(self, value):
"""
Validates that the input is a list or tuple.
"""
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
def has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in initial)
data_set = set(force_text(value) for value in data)
return data_set != initial_set
class TypedMultipleChoiceField(MultipleChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', [])
super(TypedMultipleChoiceField, self).__init__(*args, **kwargs)
def _coerce(self, value):
"""
Validates that the values are in self.choices and can be coerced to the
right type.
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
new_value = []
for choice in value:
try:
new_value.append(self.coerce(choice))
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': choice},
)
return new_value
def clean(self, value):
value = super(TypedMultipleChoiceField, self).clean(value)
return self._coerce(value)
def validate(self, value):
if value != self.empty_value:
super(TypedMultipleChoiceField, self).validate(value)
elif self.required:
raise ValidationError(self.error_messages['required'], code='required')
class ComboField(Field):
"""
A Field whose clean() method calls multiple Field clean() methods.
"""
def __init__(self, fields=(), *args, **kwargs):
super(ComboField, self).__init__(*args, **kwargs)
# Set 'required' to False on the individual fields, because the
# required validation will be handled by ComboField, not by those
# individual fields.
for f in fields:
f.required = False
self.fields = fields
def clean(self, value):
"""
Validates the given value against all of self.fields, which is a
list of Field instances.
"""
super(ComboField, self).clean(value)
for field in self.fields:
value = field.clean(value)
return value
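# --- Illustrative usage sketch (editor's addition, not Django source). It
# shows the sequential cleaning described in the docstring above; the _demo
# name and sample value are hypothetical.
def _demo_combo_field():
    f = ComboField(fields=[CharField(max_length=20), EmailField()])
    # The value must satisfy CharField.clean() and then EmailField.clean().
    return f.clean('[email protected]')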
class MultiValueField(Field):
"""
A Field that aggregates the logic of multiple Fields.
Its clean() method takes a "decompressed" list of values, which are then
cleaned into a single value according to self.fields. Each value in
this list is cleaned by the corresponding field -- the first value is
cleaned by the first field, the second value is cleaned by the second
field, etc. Once all fields are cleaned, the list of clean values is
"compressed" into a single value.
Subclasses should not have to implement clean(). Instead, they must
implement compress(), which takes a list of valid values and returns a
"compressed" version of those values -- a single value.
You'll probably want to use this with MultiWidget.
"""
default_error_messages = {
'invalid': _('Enter a list of values.'),
'incomplete': _('Enter a complete value.'),
}
def __init__(self, fields=(), *args, **kwargs):
self.require_all_fields = kwargs.pop('require_all_fields', True)
super(MultiValueField, self).__init__(*args, **kwargs)
for f in fields:
f.error_messages.setdefault('incomplete',
self.error_messages['incomplete'])
if self.require_all_fields:
# Set 'required' to False on the individual fields, because the
# required validation will be handled by MultiValueField, not
# by those individual fields.
f.required = False
self.fields = fields
def __deepcopy__(self, memo):
result = super(MultiValueField, self).__deepcopy__(memo)
result.fields = tuple(x.__deepcopy__(memo) for x in self.fields)
return result
def validate(self, value):
pass
def clean(self, value):
"""
Validates every value in the given list. A value is validated against
the corresponding Field in self.fields.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), clean() would call
DateField.clean(value[0]) and TimeField.clean(value[1]).
"""
clean_data = []
errors = []
if not value or isinstance(value, (list, tuple)):
if not value or not [v for v in value if v not in self.empty_values]:
if self.required:
raise ValidationError(self.error_messages['required'], code='required')
else:
return self.compress([])
else:
raise ValidationError(self.error_messages['invalid'], code='invalid')
for i, field in enumerate(self.fields):
try:
field_value = value[i]
except IndexError:
field_value = None
if field_value in self.empty_values:
if self.require_all_fields:
# Raise a 'required' error if the MultiValueField is
# required and any field is empty.
if self.required:
raise ValidationError(self.error_messages['required'], code='required')
elif field.required:
# Otherwise, add an 'incomplete' error to the list of
# collected errors and skip field cleaning, if a required
# field is empty.
if field.error_messages['incomplete'] not in errors:
errors.append(field.error_messages['incomplete'])
continue
try:
clean_data.append(field.clean(field_value))
except ValidationError as e:
# Collect all validation errors in a single list, which we'll
# raise at the end of clean(), rather than raising a single
# exception for the first error we encounter. Skip duplicates.
errors.extend(m for m in e.error_list if m not in errors)
if errors:
raise ValidationError(errors)
out = self.compress(clean_data)
self.validate(out)
self.run_validators(out)
return out
def compress(self, data_list):
"""
Returns a single value for the given list of values. The values can be
assumed to be valid.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), this might return a datetime
object created by combining the date and time in data_list.
"""
raise NotImplementedError('Subclasses must implement this method.')
def has_changed(self, initial, data):
if initial is None:
initial = ['' for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.widget.decompress(initial)
for field, initial, data in zip(self.fields, initial, data):
try:
initial = field.to_python(initial)
except ValidationError:
return True
if field.has_changed(initial, data):
return True
return False
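# --- Illustrative subclass sketch (editor's addition, not Django source).
# It shows the compress() contract described in the docstring above; the
# two-part phone layout is hypothetical.
class _DemoPhoneField(MultiValueField):
    def __init__(self, *args, **kwargs):
        fields = (CharField(max_length=3), CharField(max_length=7))
        super(_DemoPhoneField, self).__init__(fields, *args, **kwargs)
    def compress(self, data_list):
        # Each part has already been cleaned by the matching field.
        return '-'.join(data_list) if data_list else ''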
class FilePathField(ChoiceField):
def __init__(self, path, match=None, recursive=False, allow_files=True,
allow_folders=False, required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
super(FilePathField, self).__init__(choices=(), required=required,
widget=widget, label=label, initial=initial, help_text=help_text,
*args, **kwargs)
if self.required:
self.choices = []
else:
self.choices = [("", "---------")]
if self.match is not None:
self.match_re = re.compile(self.match)
if recursive:
for root, dirs, files in sorted(os.walk(self.path)):
if self.allow_files:
for f in files:
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
if self.allow_folders:
for f in dirs:
if f == '__pycache__':
continue
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
else:
try:
for f in sorted(os.listdir(self.path)):
if f == '__pycache__':
continue
full_file = os.path.join(self.path, f)
if (((self.allow_files and os.path.isfile(full_file)) or
(self.allow_folders and os.path.isdir(full_file))) and
(self.match is None or self.match_re.search(f))):
self.choices.append((full_file, f))
except OSError:
pass
self.widget.choices = self.choices
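# --- Illustrative usage sketch (editor's addition, not Django source). It
# builds choices from an existing directory; the temp-dir path and match
# pattern are placeholders, so the result varies by machine.
def _demo_file_path_field():
    import tempfile
    f = FilePathField(path=tempfile.gettempdir(), match=r'.*\.py$')
    return f.choices  # [(full_path, display_name), ...]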
class SplitDateTimeField(MultiValueField):
widget = SplitDateTimeWidget
hidden_widget = SplitHiddenDateTimeWidget
default_error_messages = {
'invalid_date': _('Enter a valid date.'),
'invalid_time': _('Enter a valid time.'),
}
def __init__(self, input_date_formats=None, input_time_formats=None, *args, **kwargs):
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
localize = kwargs.get('localize', False)
fields = (
DateField(input_formats=input_date_formats,
error_messages={'invalid': errors['invalid_date']},
localize=localize),
TimeField(input_formats=input_time_formats,
error_messages={'invalid': errors['invalid_time']},
localize=localize),
)
super(SplitDateTimeField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list:
# Raise a validation error if time or date is empty
# (possible if SplitDateTimeField has required=False).
if data_list[0] in self.empty_values:
raise ValidationError(self.error_messages['invalid_date'], code='invalid_date')
if data_list[1] in self.empty_values:
raise ValidationError(self.error_messages['invalid_time'], code='invalid_time')
result = datetime.datetime.combine(*data_list)
return from_current_timezone(result)
return None
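# --- Illustrative usage sketch (editor's addition, not Django source),
# assuming a configured Django settings module for date/time formats and
# time zones; the sample values are hypothetical.
def _demo_split_datetime():
    f = SplitDateTimeField()
    # The cleaned [date, time] pair is compressed into one datetime.
    return f.clean(['2015-06-01', '12:30'])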
class GenericIPAddressField(CharField):
def __init__(self, protocol='both', unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.default_validators = validators.ip_address_validators(protocol, unpack_ipv4)[0]
super(GenericIPAddressField, self).__init__(*args, **kwargs)
def to_python(self, value):
if value in self.empty_values:
return ''
value = value.strip()
if value and ':' in value:
return clean_ipv6_address(value, self.unpack_ipv4)
return value
class SlugField(CharField):
default_validators = [validators.validate_slug]
def clean(self, value):
value = self.to_python(value).strip()
return super(SlugField, self).clean(value)
class UUIDField(CharField):
default_error_messages = {
'invalid': _('Enter a valid UUID.'),
}
def prepare_value(self, value):
if isinstance(value, uuid.UUID):
return value.hex
return value
def to_python(self, value):
value = super(UUIDField, self).to_python(value)
if value in self.empty_values:
return None
if not isinstance(value, uuid.UUID):
try:
value = uuid.UUID(value)
except ValueError:
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
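# --- Illustrative usage sketch (editor's addition, not Django source). It
# shows to_python() promoting a hex string to a uuid.UUID; the sample value
# is hypothetical.
def _demo_uuid_field():
    f = UUIDField()
    return f.clean('12345678123456781234567812345678')  # -> uuid.UUID(...)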
| bsd-3-clause | 1,779,313,228,671,756,800 | 37.449045 | 110 | 0.596144 | false |
upconsulting/IsisCB | isiscb/zotero/migrations/0026_auto_20200601_0013.py | 1 | 4896 | # Generated by Django 3.0.5 on 2020-06-01 00:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('isisdata', '0091_auto_20200601_0013'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('zotero', '0025_importaccession_import_errors'),
]
operations = [
migrations.AlterField(
model_name='draftacrelation',
name='authority',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='citation_relations', to='zotero.DraftAuthority'),
),
migrations.AlterField(
model_name='draftacrelation',
name='citation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='authority_relations', to='zotero.DraftCitation'),
),
migrations.AlterField(
model_name='draftacrelation',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftattribute',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftauthority',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftauthoritylinkeddata',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftccrelation',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftccrelation',
name='object',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='relations_to', to='zotero.DraftCitation'),
),
migrations.AlterField(
model_name='draftccrelation',
name='subject',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='relations_from', to='zotero.DraftCitation'),
),
migrations.AlterField(
model_name='draftcitation',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='draftcitation',
name='language',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='isisdata.Language'),
),
migrations.AlterField(
model_name='draftcitationlinkeddata',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='fieldresolutionevent',
name='for_model',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='fieldresolutions_for', to='contenttypes.ContentType'),
),
migrations.AlterField(
model_name='importaccession',
name='imported_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='importaccession',
name='ingest_to',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='isisdata.Dataset'),
),
migrations.AlterField(
model_name='instanceresolutionevent',
name='for_model',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instanceresolutions_for', to='contenttypes.ContentType'),
),
migrations.AlterField(
model_name='instanceresolutionevent',
name='to_model',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instanceresolutions_to', to='contenttypes.ContentType'),
),
]
| mit | 3,641,457,864,839,707,000 | 46.533981 | 164 | 0.634804 | false |
Microsoft/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pip/_internal/download.py | 7 | 33335 | from __future__ import absolute_import
import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
from pip._vendor.urllib3.util import IS_PYOPENSSL
import pip
from pip._internal.compat import WINDOWS
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
display_path, format_size, get_installed_version, rmtree, splitext,
unpack_file,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs
try:
import ssl # noqa
except ImportError:
ssl = None
HAS_TLS = (ssl is not None) or IS_PYOPENSSL
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
distro_infos = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if libc:
distro_infos["libc"] = libc
if distro_infos:
data["distro"] = distro_infos
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
if HAS_TLS:
data["openssl_version"] = ssl.OPENSSL_VERSION
setuptools_version = get_installed_version("setuptools")
if setuptools_version is not None:
data["setuptools_version"] = setuptools_version
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
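# --- Illustrative usage sketch (editor's addition, not pip source). It
# shows the shape of the string built above; the _demo name is hypothetical.
def _demo_user_agent():
    ua = user_agent()
    # "pip/<version>" followed by a sorted-key JSON payload describing the
    # interpreter, platform, libc/distro and OpenSSL build.
    assert ua.startswith("pip/")
    return ua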
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
def __call__(self, req):
parsed = urllib_parse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.rsplit("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
# Get creds from netrc if we still don't have them
if username is None and password is None:
netrc_auth = get_netrc_auth(req.url)
username, password = netrc_auth if netrc_auth else (None, None)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username = six.moves.input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
user, pwd = userinfo.split(":", 1)
return (urllib_unquote(user), urllib_unquote(pwd))
return urllib_unquote(userinfo), None
return None, None
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class SafeFileCache(FileCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
"""
def __init__(self, *args, **kwargs):
super(SafeFileCache, self).__init__(*args, **kwargs)
# Check to ensure that the directory containing our cache directory
# is owned by the user current executing pip. If it does not exist
# we will check the parent directory until we find one that does exist.
# If it is not owned by the user executing pip then we will disable
# the cache and log a warning.
if not check_path_owner(self.directory):
logger.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the cache has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want sudo's -H flag.",
self.directory,
)
# Set our directory to None to disable the Cache
self.directory = None
def get(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).get(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def set(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).set(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def delete(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).delete(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
retries = kwargs.pop("retries", 0)
cache = kwargs.pop("cache", None)
insecure_hosts = kwargs.pop("insecure_hosts", [])
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
# Create our urllib3.Retry instance which will allow us to customize
# how we handle retries.
retries = urllib3.Retry(
# Set the total number of retries that a particular request can
# have.
total=retries,
# A 503 error from PyPI typically means that the Fastly -> Origin
# connection got interrupted in some way. A 503 error in general
# is typically considered a transient error so we'll go ahead and
# retry it.
# A 500 may indicate transient error in Amazon S3
            # A 520 or 527 may indicate a transient error in CloudFlare
status_forcelist=[500, 503, 520, 527],
# Add a small amount of back off between failed requests in
# order to prevent hammering the service.
backoff_factor=0.25,
)
# We want to _only_ cache responses on securely fetched origins. We do
# this because we can't validate the response of an insecurely fetched
# origin, and we don't want someone to be able to poison the cache and
# require manual eviction from the cache to fix it.
if cache:
secure_adapter = CacheControlAdapter(
cache=SafeFileCache(cache, use_dir_lock=True),
max_retries=retries,
)
else:
secure_adapter = HTTPAdapter(max_retries=retries)
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
# support caching (see above) so we'll use it for all http:// URLs as
# well as any https:// host that we've marked as ignoring TLS errors
# for.
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
self.mount("https://", secure_adapter)
self.mount("http://", insecure_adapter)
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
# We want to use a non-validating adapter for any requests which are
# deemed insecure.
for host in insecure_hosts:
self.mount("https://{}/".format(host), insecure_adapter)
def request(self, method, url, *args, **kwargs):
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
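# --- Illustrative usage sketch (editor's addition, not pip source). It
# wires retries plus an on-disk HTTP cache; the cache path is a placeholder,
# not a pip default.
def _demo_pip_session():
    session = PipSession(retries=3, cache="/tmp/pip-http-cache")
    session.timeout = 15  # default per-request timeout, see request() above
    return session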
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode.
:param url: File path or url.
:param comes_from: Origin description of requirements.
:param session: Instance of pip.download.PipSession.
"""
if session is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from and
comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib_parse.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
# FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
return resp.url, resp.text
try:
with open(url, 'rb') as f:
content = auto_decode(f.read())
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
_, netloc, path, _, _ = urllib_parse.urlsplit(url)
# if we have a UNC path, prepend UNC share notation
if netloc:
netloc = '\\\\' + netloc
path = urllib_request.url2pathname(netloc + path)
return path
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
return url
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:
return True
return False
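# --- Illustrative usage sketch (editor's addition, not pip source). It
# round-trips a local path through the helpers above; the _demo name is
# hypothetical.
def _demo_url_helpers():
    url = path_to_url(os.getcwd())  # e.g. 'file:///current/working/dir'
    assert is_url(url)
    return url_to_path(url)  # back to the original absolute path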
def unpack_vcs_link(link, location):
vcs_backend = _get_used_vcs_backend(link)
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def is_dir_url(link):
"""Return whether a file:// Link points to a directory.
``link`` must not have any other scheme but file://. Call is_file_url()
first.
"""
link_path = url_to_path(link.url_without_fragment)
return os.path.isdir(link_path)
def _progress_indicator(iterable, *args, **kwargs):
return iterable
def _download_url(resp, link, content_file, hashes, progress_bar):
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
cached_resp = getattr(resp, "from_cache", False)
if logger.getEffectiveLevel() > logging.INFO:
show_progress = False
elif cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
def resp_read(chunk_size):
try:
# Special case for urllib3.
for chunk in resp.raw.stream(
chunk_size,
# We use decode_content=False here because we don't
# want urllib3 to mess with the raw bytes we get
# from the server. If we decompress inside of
# urllib3 then we cannot verify the checksum
# because the checksum will be of the compressed
# file. This breakage will only occur if the
# server adds a Content-Encoding header, which
# depends on how the server was configured:
# - Some servers will notice that the file isn't a
# compressible file and will leave the file alone
# and with an empty Content-Encoding
# - Some servers will notice that the file is
# already compressed and will leave the file
# alone and will add a Content-Encoding: gzip
# header
# - Some servers won't notice anything at all and
# will take a file that's already been compressed
# and compress it again and set the
# Content-Encoding: gzip header
#
# By setting this not to decode automatically we
# hope to eliminate problems with the second case.
decode_content=False):
yield chunk
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
def written_chunks(chunks):
for chunk in chunks:
content_file.write(chunk)
yield chunk
progress_indicator = _progress_indicator
if link.netloc == PyPI.netloc:
url = show_url
else:
url = link.url_without_fragment
if show_progress: # We don't show progress on cached responses
progress_indicator = DownloadProgressProvider(progress_bar,
max=total_length)
if total_length:
logger.info("Downloading %s (%s)", url, format_size(total_length))
else:
logger.info("Downloading %s", url)
elif cached_resp:
logger.info("Using cached %s", url)
else:
logger.info("Downloading %s", url)
logger.debug('Downloading from URL %s', link)
downloaded_chunks = written_chunks(
progress_indicator(
resp_read(CONTENT_CHUNK_SIZE),
CONTENT_CHUNK_SIZE
)
)
if hashes:
hashes.check_against_chunks(downloaded_chunks)
else:
consume(downloaded_chunks)
def _copy_file(filename, location, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
display_path(download_location), ('i', 'w', 'b', 'a'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
elif response == 'a':
sys.exit(-1)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None,
session=None, hashes=None, progress_bar="on"):
if session is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
with TempDirectory(kind="unpack") as temp_dir:
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link,
download_dir,
hashes)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(link,
session,
temp_dir.path,
hashes,
progress_bar)
# unpack the archive to the build dir location. even when only
# downloading archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, link)
if not already_downloaded_path:
os.unlink(from_path)
def unpack_file_url(link, location, download_dir=None, hashes=None):
"""Unpack link into location.
If download_dir is provided and link points to a file, make a copy
of the link file inside download_dir.
"""
link_path = url_to_path(link.url_without_fragment)
# If it's a url to a local directory
if is_dir_url(link):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
if download_dir:
logger.info('Link is a directory, ignoring download_dir')
return
# If --require-hashes is off, `hashes` is either empty, the
# link's embedded hash, or MissingHashes; it is required to
# match. If --require-hashes is on, we are satisfied by any
# hash in `hashes` matching: a URL-based or an option-based
# one; no internet-sourced hash will be in `hashes`.
if hashes:
hashes.check_against_path(link_path)
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link,
download_dir,
hashes)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, link)
def _copy_dist_from_dir(link_path, location):
"""Copy distribution files in `link_path` to `location`.
Invoked when user requests to install a local directory. E.g.:
pip install .
pip install ~/dev/git-repos/python-prompt-toolkit
"""
# Note: This is currently VERY SLOW if you have a lot of data in the
# directory, because it copies everything with `shutil.copytree`.
# What it should really do is build an sdist and install that.
# See https://github.com/pypa/pip/issues/2195
if os.path.isdir(location):
rmtree(location)
# build an sdist
setup_py = 'setup.py'
sdist_args = [sys.executable]
sdist_args.append('-c')
sdist_args.append(SETUPTOOLS_SHIM % setup_py)
sdist_args.append('sdist')
sdist_args += ['--dist-dir', location]
logger.info('Running setup.py sdist for %s', link_path)
with indent_log():
call_subprocess(sdist_args, cwd=link_path, show_stdout=False)
# unpack sdist into `location`
sdist = os.path.join(location, os.listdir(location)[0])
logger.info('Unpacking sdist %s into %s', sdist, location)
unpack_file(sdist, location, content_type=None, link=None)
class PipXmlrpcTransport(xmlrpc_client.Transport):
"""Provide a `xmlrpclib.Transport` implementation via a `PipSession`
object.
"""
def __init__(self, index_url, session, use_datetime=False):
xmlrpc_client.Transport.__init__(self, use_datetime)
index_parts = urllib_parse.urlparse(index_url)
self._scheme = index_parts.scheme
self._session = session
def request(self, host, handler, request_body, verbose=False):
parts = (self._scheme, host, handler, None, None, None)
url = urllib_parse.urlunparse(parts)
try:
headers = {'Content-Type': 'text/xml'}
response = self._session.post(url, data=request_body,
headers=headers, stream=True)
response.raise_for_status()
self.verbose = verbose
return self.parse_response(response.raw)
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s",
exc.response.status_code, url,
)
raise
def unpack_url(link, location, download_dir=None,
only_download=False, session=None, hashes=None,
progress_bar="on"):
"""Unpack link.
If link is a VCS link:
if only_download, export into download_dir and ignore location
else unpack into location
for other types of link:
- unpack into location
- if download_dir, copy the file into download_dir
- if only_download, mark location for deletion
:param hashes: A Hashes object, one of whose embedded hashes must match,
or HashMismatch will be raised. If the Hashes is empty, no matches are
required, and unhashable types of requirements (like VCS ones, which
would ordinarily raise HashUnsupported) are allowed.
"""
# non-editable vcs urls
if is_vcs_url(link):
unpack_vcs_link(link, location)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir, hashes=hashes)
# http urls
else:
if session is None:
session = PipSession()
unpack_http_url(
link,
location,
download_dir,
session,
hashes=hashes,
progress_bar=progress_bar
)
if only_download:
write_delete_marker_file(location)
def _download_http_url(link, session, temp_dir, hashes, progress_bar):
"""Download link url into temp_dir using provided session"""
target_url = link.url.split('#', 1)[0]
try:
resp = session.get(
target_url,
# We use Accept-Encoding: identity here because requests
# defaults to accepting compressed responses. This breaks in
# a variety of ways depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible
# file and will leave the file alone and with an empty
# Content-Encoding
# - Some servers will notice that the file is already
# compressed and will leave the file alone and will add a
# Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take
# a file that's already been compressed and compress it again
# and set the Content-Encoding: gzip header
# By setting this to request only the identity encoding We're
# hoping to eliminate the third case. Hopefully there does not
# exist a server which when given a file will notice it is
# already compressed and that you're not asking for a
# compressed file and will then decompress it before sending
# because if that's the case I don't think it'll ever be
# possible to make this work.
headers={"Accept-Encoding": "identity"},
stream=True,
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
file_path = os.path.join(temp_dir, filename)
with open(file_path, 'wb') as content_file:
_download_url(resp, link, content_file, hashes, progress_bar)
return file_path, content_type
def _check_download_dir(link, download_dir, hashes):
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
return None
| apache-2.0 | 6,741,434,362,404,756,000 | 35.155098 | 79 | 0.60069 | false |
xin1195/smartSearch | test/hanzi.py | 1 | 2343 | #!/usr/bin/env python3
# _*_coding:utf-8_*_
# Created by "LiuXin"
import traceback
import pymongo
# text = """吖】ā[吖嗪](āqín)名有机化合物的一类,呈环状结构,含有一个或几个氮原子,如吡啶、哒嗪、嘧啶等。[英azine]
#
# 【阿】ā〈方〉前缀。①用在排行、小名或姓的前面,有亲昵的意味:~大|~宝|~唐。②用在某些亲属名称的前面:~婆|~爹|~哥。
# 另见2页•ɑ“啊”;354页ē。
#
# 【阿鼻地狱】ābídìyù佛教指最深层的地狱,是犯了重罪的人死后灵魂永远受苦的地方。"""
# file = open('zidian_c.txt', "r", encoding="utf-8")
# try:
# word_list = []
# all_the_text = file.read()
# text_list = all_the_text.split("【")
# for text in text_list:
# temp_list = text.split("】")
# word_list.append({"name": temp_list[0], "description": temp_list[1]})
# print(word_list)
# finally:
# file.close()
# for text in texts:
# if "\u4E00" < text < "\u9FBF":
# if text == "\u3010":
# print(text)
# if text == "\u3011":
# print(text)
# if "\u0000" <= text <= "\u024F":
# print(text)
# text_list = text.split("【")
# for text in text_list:
# text_list_list = text.split("】")
# print(text_list_list)
from setting import logger
conn = pymongo.Connection(host='112.74.204.250', port=27017)
db = conn.smartSearch
def get_word_list(file_name):
w_list = []
f = open(file_name, "r", encoding="utf-8")
try:
all_text = f.read()
t_list = all_text.split("【")
for tex in t_list:
tex = tex.replace("\n", "")
            # str.find() returns -1 (truthy) when "】" is absent and 0 (falsy)
            # at position 0, so test membership instead.
            if tex and "】" in tex:
tem_list = tex.split("】")
if tem_list[0] and tem_list[1]:
w_list.append({"name": tem_list[0], "description": tem_list[1]})
return w_list
    except Exception:
logger.error(traceback.format_exc())
return w_list
finally:
f.close()
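def _demo_parse_format():
    # Illustrative sketch (editor's addition): shows the "【word】definition"
    # layout that get_word_list() splits on; the sample entries are
    # hypothetical.
    sample = "【阿】a prefix used before names【吖】used in azine compound names"
    parts = [t.split("】", 1) for t in sample.split("【") if "】" in t]
    return [{"name": p[0], "description": p[1]} for p in parts]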
def update_to_mongo(w_list):
for w_dict in w_list:
print(w_dict)
db.info_word.update({"name": w_dict.get("name", "")}, w_dict, upsert=True)
def main():
w_list = get_word_list("zidian.txt")
# print(w_list)
update_to_mongo(w_list)
print("更新完成")
if __name__ == "__main__":
main()
| apache-2.0 | -3,830,903,602,155,663,400 | 23.402439 | 84 | 0.554223 | false |
danlangridge/galah | galah/web/views/_api.py | 1 | 7617 | # Copyright 2012-2013 John Sullivan
# Copyright 2012-2013 Other contributors as noted in the CONTRIBUTERS file
#
# This file is part of Galah.
#
# Galah is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Galah is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Galah. If not, see <http://www.gnu.org/licenses/>.
from flask import Response, request
from galah.web import app, oauth_enabled
from flask.ext.login import current_user
from galah.web.api.commands import api_calls, UserError
from galah.base.crypto.passcrypt import check_seal, deserialize_seal
from galah.db.models import User
from flask.ext.login import login_user
from galah.web.auth import FlaskUser
from galah.web.util import GalahWebAdapter
import requests
import logging
logger = GalahWebAdapter(logging.getLogger("galah.web.views.api"))
def get_many(dictionary, *args):
return dict((i, dictionary.get(i)) for i in args)
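# Illustrative usage sketch (editor's addition, not Galah source): pull a
# fixed subset of keys; absent keys map to None. The sample dict is
# hypothetical.
def _demo_get_many():
    return get_many({"email": "[email protected]"}, "email", "password")
    # -> {"email": "[email protected]", "password": None}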
@app.route("/api/login", methods = ["POST"])
def api_login():
def success(the_user):
login_user(the_user)
return Response(
response = "Successfully logged in.",
headers = {"X-CallSuccess": "True"}
)
def failure():
return Response(
response = "Incorrect email or password.",
headers = {"X-CallSuccess": "False"}
)
# If the user is trying to login by giving us an access_token they got from
# signing in through google, validate the token.
access_token = request.form.get("access_token", None)
if access_token and oauth_enabled:
# Ask google to verify that they authed the user.
req = requests.post(
"https://www.googleapis.com/oauth2/v1/tokeninfo",
data = { "access_token": access_token }
)
req_json = req.json()
email = req_json.get("email", "unknown")
if req.status_code != requests.codes.ok:
logger.info("Invalid OAuth2 login by %s.", email)
return failure()
# Validate client id is matching to avoid confused deputy attack
if req_json["audience"] != app.config["GOOGLE_APICLIENT_ID"]:
logger.error(
"Non-matching audience field detected for user %s.", email
)
return failure()
# Grab the user from the database (here we also check to make sure that
# this user actually has account on Galah).
try:
user = FlaskUser(User.objects.get(email = req_json["email"]))
except User.DoesNotExist:
logger.info(
"User %s signed in via OAuth2 but a Galah account does not "
"exist for that user.", email
)
return failure()
logger.info("User %s successfully signed in with OAuth2.", email)
return success(user)
elif access_token and not oauth_enabled:
logger.warning("Attempted login via OAuth2 but OAuth2 is not configured.")
return failure()
# If the user is trying to authenticate through Galah...
password = request.form.get("password", None)
email = request.form.get("email", None)
if email and password:
try:
user = FlaskUser(User.objects.get(email = email))
except User.DoesNotExist:
logger.info(
"User %s tried to sign in via internal auth but a Galah "
"account does not exist for that user.", email
)
return failure()
if check_seal(password, deserialize_seal(str(user.seal))):
logger.info(
"User %s succesfully authenticated with internal auth.", email
)
return success(user)
else:
logger.info(
"User %s tried to sign in via internal auth but an invalid "
"password was given.", email
)
return failure()
logger.warning("Malformed request.")
return failure()
@app.route("/api/call", methods = ["POST"])
def api_call():
try:
# Make sure we got some data we can work with
if request.json is None:
raise UserError("No request data sent.")
# The top level object must be either a non-empty list or a dictionary
# with an api_call key. They will have similar information either way
# however, so here we extract that information.
if type(request.json) is list and request.json:
# The API call's name is the first item in the list, so use that
# to grab the actual APICall object we need.
api_name = request.json.pop(0)
api_args = list(request.json)
api_kwargs = {}
elif type(request.json) is dict and "api_name" in request.json:
# Don't let the user insert their own current_user argument
if "current_user" in request.json:
raise UserError("You cannot fool the all-knowing Galah.")
# Resolve the name of the API call and retrieve the actual
# APICall object we need.
api_name = request.json["api_name"]
api_args = request.json["args"]
del request.json["api_name"]
del request.json["args"]
api_kwargs = dict(**request.json)
else:
logger.warning("Could not parse request.")
raise UserError("Request data not formed correctly.")
if api_name in api_calls:
api_call = api_calls[api_name]
else:
raise UserError("%s is not a recognized API call." % api_name)
logger.info(
"API call %s with args=%s and kwargs=%s",
api_name, str(api_args), str(api_kwargs)
)
call_result = api_call(current_user, *api_args, **api_kwargs)
# If an API call returns a tuple, the second item will be headers that
# should be added onto the response.
response_headers = {"X-CallSuccess": True}
if isinstance(call_result, tuple):
response_text = call_result[0]
response_headers.update(call_result[1])
else:
response_text = call_result
return Response(
response = response_text,
headers = response_headers,
mimetype = "text/plain"
)
except UserError as e:
logger.info("UserError raised during API call: %s.", str(e))
return Response(
response = "Your command cannot be completed as entered: %s" %
str(e),
headers = {
"X-CallSuccess": "False",
"X-ErrorType": e.__class__.__name__
},
mimetype = "text/plain"
)
except Exception as e:
logger.exception("Exception occured while processing API request.")
return Response(
response =
"An internal server error occurred processing your request.",
headers = {
"X-CallSuccess": "False",
"X-ErrorType": "Exception"
},
mimetype = "text/plain"
)
| agpl-3.0 | -3,806,611,168,316,241,400 | 34.101382 | 82 | 0.598136 | false |
muntasirsyed/intellij-community | python/lib/Lib/site-packages/django/contrib/localflavor/in_/in_states.py | 296 | 1859 | """
A mapping of state misspellings/abbreviations to normalized abbreviations, and
an alphabetical list of states for use as `choices` in a formfield.
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
STATE_CHOICES = (
('KA', 'Karnataka'),
('AP', 'Andhra Pradesh'),
('KL', 'Kerala'),
('TN', 'Tamil Nadu'),
('MH', 'Maharashtra'),
('UP', 'Uttar Pradesh'),
('GA', 'Goa'),
('GJ', 'Gujarat'),
('RJ', 'Rajasthan'),
('HP', 'Himachal Pradesh'),
('JK', 'Jammu and Kashmir'),
('AR', 'Arunachal Pradesh'),
('AS', 'Assam'),
('BR', 'Bihar'),
('CG', 'Chattisgarh'),
('HR', 'Haryana'),
('JH', 'Jharkhand'),
('MP', 'Madhya Pradesh'),
('MN', 'Manipur'),
('ML', 'Meghalaya'),
('MZ', 'Mizoram'),
('NL', 'Nagaland'),
('OR', 'Orissa'),
('PB', 'Punjab'),
('SK', 'Sikkim'),
('TR', 'Tripura'),
('UA', 'Uttarakhand'),
('WB', 'West Bengal'),
# Union Territories
('AN', 'Andaman and Nicobar'),
('CH', 'Chandigarh'),
('DN', 'Dadra and Nagar Haveli'),
('DD', 'Daman and Diu'),
('DL', 'Delhi'),
('LD', 'Lakshadweep'),
('PY', 'Pondicherry'),
)
STATES_NORMALIZED = {
'ka': 'KA',
'karnatka': 'KA',
'tn': 'TN',
'tamilnad': 'TN',
'tamilnadu': 'TN',
'andra pradesh': 'AP',
'andrapradesh': 'AP',
'andhrapradesh': 'AP',
'maharastra': 'MH',
'mh': 'MH',
'ap': 'AP',
'dl': 'DL',
'dd': 'DD',
'br': 'BR',
'ar': 'AR',
'sk': 'SK',
'kl': 'KL',
'ga': 'GA',
'rj': 'RJ',
'rajastan': 'RJ',
'rajasthan': 'RJ',
'hp': 'HP',
'ua': 'UA',
'up': 'UP',
'mp': 'MP',
'mz': 'MZ',
'bengal': 'WB',
'westbengal': 'WB',
'mizo': 'MZ',
'orisa': 'OR',
'odisa': 'OR',
'or': 'OR',
}
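# Usage sketch (hypothetical; not part of the original module). The keys are
# all lower-case, so free-text input is stripped and lower-cased before the
# lookup, along the lines of:
#
#   STATES_NORMALIZED.get('Tamilnad'.strip().lower())  # -> 'TN'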
| apache-2.0 | 8,666,369,970,084,760,000 | 21.130952 | 78 | 0.470683 | false |
oxtopus/nupic | nupic/regions/ImageSensorExplorers/RandomSweep.py | 17 | 10407 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.regions.ImageSensorExplorers.BaseExplorer import BaseExplorer
class RandomSweep(BaseExplorer):
"""
This explorer performs randomly-selected horizontal or vertical sweeps, and
it switches to a randomly-selected image in between each sweep.
"""
def __init__(self, sweepDirections=['left', 'right', 'up', 'down'],
shiftDuringSweep=1, sweepOffObject=False, *args, **kwargs):
"""
sweepDirections -- Directions for sweeping. Must be a list containing
one or more of 'left', 'right', 'up', and 'down' for horizontal and
vertical sweeps, or 'leftup', 'leftdown', 'rightup', and 'rightdown'
for diagonal sweeps (or 'upleft, 'downleft', 'upright', and
'downright'). Can also be the string 'all', for all eight directions.
shiftDuringSweep -- Number of pixels to jump with each step (during
a sweep).
sweepOffObject -- Whether the sensor can only include a part of the
object, as specified by the bounding box. If False, it will only move
to positions that include as much of the object as possible.
"""
BaseExplorer.__init__(self, *args, **kwargs)
if sweepDirections == 'all':
sweepDirections = ['left', 'right', 'up', 'down',
'leftdown', 'leftup', 'rightdown', 'rightup']
else:
for direction in sweepDirections:
if direction not in ('left', 'right', 'up', 'down',
'leftup', 'upleft', 'leftdown', 'downleft',
'rightup', 'upright', 'rightdown', 'downright'):
raise RuntimeError('Unknown sweep direction: %s' % direction)
if type(shiftDuringSweep) is not int:
raise RuntimeError("'shiftDuringSweep' should be an integer")
if type(sweepOffObject) not in (bool, int):
raise RuntimeError("'sweepOffObject' should be a boolean")
self.sweepDirections = sweepDirections
self.shiftDuringSweep = shiftDuringSweep
self.sweepOffObject = sweepOffObject
def first(self):
"""
Set up the position.
BaseExplorer picks image 0, offset (0,0), etc., but explorers that wish
to set a different first position should extend this method. Such explorers
may wish to call BaseExplorer.first(center=False), which initializes the
position tuple but does not call centerImage() (which could cause
unnecessary filtering to occur).
"""
BaseExplorer.first(self, center=False)
if not self.numImages:
return
# Pick a random direction and filtered image
self.direction = self.random.choice(self.sweepDirections)
self.position['image'] = self.random.randint(0, self.numImages - 1)
for i in xrange(self.numFilters):
self.position['filters'][i] = self.random.randint(0,
self.numFilterOutputs[i] - 1)
filteredImages = self.getFilteredImages()
# Pick a random starting position on the appropriate edge of the image
sbbox = self._getSweepBoundingBox(filteredImages[0])
if self.direction == 'left':
self.position['offset'][0] = sbbox[2] - 1
self.position['offset'][1] = self.random.randint(sbbox[1], sbbox[3] - 1)
elif self.direction == 'right':
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] = self.random.randint(sbbox[1], sbbox[3] - 1)
elif self.direction == 'up':
self.position['offset'][0] = self.random.randint(sbbox[0], sbbox[2] - 1)
self.position['offset'][1] = sbbox[3] - 1
elif self.direction == 'down':
self.position['offset'][0] = self.random.randint(sbbox[0], sbbox[2] - 1)
self.position['offset'][1] = sbbox[1]
elif self.direction in ('leftup', 'upleft'):
if self.random.randint(0,1):
self.position['offset'][0] = \
self.random.randint(sbbox[0] + (sbbox[2] - sbbox[0])/2, sbbox[2] - 1)
self.position['offset'][1] = sbbox[3] - 1
else:
self.position['offset'][0] = sbbox[2] - 1
self.position['offset'][1] = \
self.random.randint(sbbox[1] + (sbbox[3] - sbbox[1])/2, sbbox[3] - 1)
elif self.direction in ('leftdown', 'downleft'):
if self.random.randint(0,1):
self.position['offset'][0] = \
self.random.randint(sbbox[0] + (sbbox[2] - sbbox[0])/2, sbbox[2] - 1)
self.position['offset'][1] = sbbox[1]
else:
self.position['offset'][0] = sbbox[2] - 1
self.position['offset'][1] = \
self.random.randint(sbbox[1], sbbox[3] - 1 - (sbbox[3] - sbbox[1])/2)
elif self.direction in ('rightup', 'upright'):
if self.random.randint(0,1):
self.position['offset'][0] = \
self.random.randint(sbbox[0], sbbox[2] - 1 - (sbbox[2] - sbbox[0])/2)
self.position['offset'][1] = sbbox[3] - 1
else:
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] = \
self.random.randint(sbbox[1] + (sbbox[3] - sbbox[1])/2, sbbox[3] - 1)
elif self.direction in ('rightdown', 'downright'):
if self.random.randint(0,1):
self.position['offset'][0] = \
self.random.randint(sbbox[0], sbbox[2] - 1 - (sbbox[2] - sbbox[0])/2)
self.position['offset'][1] = sbbox[1]
else:
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] = \
self.random.randint(sbbox[1], sbbox[3] - 1 - (sbbox[3] - sbbox[1])/2)
# Increment the position by a random amount in the range
# [0, shiftDuringSweep)
if self.shiftDuringSweep > 1:
prevShiftDuringSweep = self.shiftDuringSweep
self.shiftDuringSweep = self.random.randint(0, self.shiftDuringSweep)
self._nextSweepPosition()
self.shiftDuringSweep = prevShiftDuringSweep
if self.position['reset']:
self.first()
self.position['reset'] = True
def next(self, seeking=False):
"""
Go to the next position (next iteration).
seeking -- Boolean that indicates whether the explorer is calling next()
from seek(). If True, the explorer should avoid unnecessary computation
that would not affect the seek command. The last call to next() from
seek() will be with seeking=False.
"""
BaseExplorer.next(self)
if self.position['reset'] and self.blankWithReset:
# Last iteration was a blank, so don't increment the position
self.position['reset'] = False
else:
self.position['reset'] = False
self._nextSweepPosition()
# Begin a new sweep if necessary
if self.position['reset']:
self.first()
def _nextSweepPosition(self):
"""
Go to the next position in the current sweep.
"""
filteredImages = self.getFilteredImages()
sbbox = self._getSweepBoundingBox(filteredImages[0])
if self.direction == 'left':
self.position['offset'][0] -= self.shiftDuringSweep
if self.position['offset'][0] < sbbox[0]:
self.position['reset'] = True
elif self.direction == 'right':
self.position['offset'][0] += self.shiftDuringSweep
if self.position['offset'][0] >= sbbox[2]:
self.position['reset'] = True
elif self.direction == 'up':
self.position['offset'][1] -= self.shiftDuringSweep
if self.position['offset'][1] < sbbox[1]:
self.position['reset'] = True
elif self.direction == 'down':
self.position['offset'][1] += self.shiftDuringSweep
if self.position['offset'][1] >= sbbox[3]:
self.position['reset'] = True
elif self.direction in ('leftup', 'upleft'):
self.position['offset'][0] -= self.shiftDuringSweep
self.position['offset'][1] -= self.shiftDuringSweep
if self.position['offset'][0] < sbbox[0] \
or self.position['offset'][1] < sbbox[1]:
self.position['reset'] = True
elif self.direction in ('leftdown', 'downleft'):
self.position['offset'][0] -= self.shiftDuringSweep
self.position['offset'][1] += self.shiftDuringSweep
if self.position['offset'][0] < sbbox[0] \
or self.position['offset'][1] >= sbbox[3]:
self.position['reset'] = True
elif self.direction in ('rightup', 'upright'):
self.position['offset'][0] += self.shiftDuringSweep
self.position['offset'][1] -= self.shiftDuringSweep
if self.position['offset'][0] >= sbbox[2] \
or self.position['offset'][1] < sbbox[1]:
self.position['reset'] = True
elif self.direction in ('rightdown', 'downright'):
self.position['offset'][0] += self.shiftDuringSweep
self.position['offset'][1] += self.shiftDuringSweep
if self.position['offset'][0] >= sbbox[2] \
or self.position['offset'][1] >= sbbox[3]:
self.position['reset'] = True
def _getSweepBoundingBox(self, image):
"""
Calculate a 'sweep bounding box' from the image's bounding box.
If 'sbbox' is the bounding box returned from this method, valid sweep
positions [x,y] are bounded by sbbox[0] <= x < sbbox[2] and
sbbox[1] <= y < sbbox[3].
"""
bbox = image.split()[1].getbbox()
if bbox is None:
bbox = (0,0,1,1)
if self.sweepOffObject:
startX = bbox[0] - self.enabledWidth + 1
startY = bbox[1] - self.enabledHeight + 1
endX = bbox[2]
endY = bbox[3]
else:
startX = min(bbox[0], bbox[2] - self.enabledWidth)
startY = min(bbox[1], bbox[3] - self.enabledHeight)
endX = max(bbox[0], bbox[2] - self.enabledWidth) + 1
endY = max(bbox[1], bbox[3] - self.enabledHeight) + 1
return (startX, startY, endX, endY)
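# Hypothetical usage sketch: in practice ImageSensor instantiates the explorer
# named in its 'explorer' parameter, so direct construction like this is for
# illustration only (the argument values are assumptions, not recommendations):
#
#   explorer = RandomSweep(sweepDirections=['left', 'right', 'up', 'down'],
#                          shiftDuringSweep=2, sweepOffObject=False)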
| gpl-3.0 | -4,005,250,883,600,172,500 | 40.96371 | 79 | 0.627943 | false |
Shekharrajak/django-db-mailer | dbmail/south_migrations/0034_auto__chg_field_mailgroupemail_email__chg_field_mailfromemail_email__c.py | 2 | 19791 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'MailGroupEmail.email'
db.alter_column('dbmail_mailgroupemail', 'email', self.gf('django.db.models.fields.CharField')(max_length=75))
# Changing field 'MailFromEmail.email'
db.alter_column('dbmail_mailfromemail', 'email', self.gf('django.db.models.fields.CharField')(unique=True, max_length=75))
# Changing field 'MailLogEmail.email'
db.alter_column('dbmail_maillogemail', 'email', self.gf('django.db.models.fields.CharField')(max_length=75))
# Changing field 'MailTemplate.from_email'
db.alter_column('dbmail_mailtemplate', 'from_email_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['dbmail.MailFromEmail'], null=True, on_delete=models.SET_NULL))
def backwards(self, orm):
# Changing field 'MailGroupEmail.email'
db.alter_column('dbmail_mailgroupemail', 'email', self.gf('django.db.models.fields.EmailField')(max_length=75))
# Changing field 'MailFromEmail.email'
db.alter_column('dbmail_mailfromemail', 'email', self.gf('django.db.models.fields.EmailField')(max_length=75, unique=True))
# Changing field 'MailLogEmail.email'
db.alter_column('dbmail_maillogemail', 'email', self.gf('django.db.models.fields.EmailField')(max_length=75))
# Changing field 'MailTemplate.from_email'
db.alter_column('dbmail_mailtemplate', 'from_email_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['dbmail.MailFromEmail'], null=True))
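    # Running this South schema migration (assumed standard South setup; the
    # numeric prefix is enough for South to resolve the full migration name):
    #   python manage.py migrate dbmail 0034
    #   python manage.py migrate dbmail 0033   # roll back to the previous state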
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'dbmail.apikey': {
'Meta': {'object_name': 'ApiKey'},
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailbcc': {
'Meta': {'object_name': 'MailBcc'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailcategory': {
'Meta': {'object_name': 'MailCategory'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailfile': {
'Meta': {'object_name': 'MailFile'},
'filename': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'to': "orm['dbmail.MailTemplate']"})
},
'dbmail.mailfromemail': {
'Meta': {'object_name': 'MailFromEmail'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'credential': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailFromEmailCredential']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailfromemailcredential': {
'Meta': {'object_name': 'MailFromEmailCredential'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fail_silently': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'use_tls': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'dbmail.mailgroup': {
'Meta': {'object_name': 'MailGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailgroupemail': {
'Meta': {'unique_together': "(('email', 'group'),)", 'object_name': 'MailGroupEmail'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emails'", 'to': "orm['dbmail.MailGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'dbmail.maillog': {
'Meta': {'object_name': 'MailLog'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'error_exception': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLogException']", 'null': 'True', 'blank': 'True'}),
'error_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'log_id': ('django.db.models.fields.CharField', [], {'max_length': '60', 'db_index': 'True'}),
'num_of_retries': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailTemplate']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'dbmail.maillogemail': {
'Meta': {'object_name': 'MailLogEmail'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLog']"}),
'mail_type': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'dbmail.maillogexception': {
'Meta': {'object_name': 'MailLogException'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
},
'dbmail.maillogtrack': {
'Meta': {'object_name': 'MailLogTrack'},
'counter': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'ip_area_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_code3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_dma_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_latitude': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_longitude': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_postal_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mail_log': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLog']"}),
'ua': ('django.db.models.fields.CharField', [], {'max_length': '350', 'null': 'True', 'blank': 'True'}),
'ua_browser': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_browser_version': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'ua_dist': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'ua_dist_version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_os': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_os_version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailtemplate': {
'Meta': {'object_name': 'MailTemplate'},
'bcc_email': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['dbmail.MailBcc']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailCategory']", 'null': 'True', 'blank': 'True'}),
'context_note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'enable_log': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'from_email': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailFromEmail']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_html': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'num_of_retries': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'priority': ('django.db.models.fields.SmallIntegerField', [], {'default': '6'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.signal': {
'Meta': {'object_name': 'Signal'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailGroup']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'receive_once': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'default': "'{{ instance.email }}'", 'null': 'True', 'blank': 'True'}),
'signal': ('django.db.models.fields.CharField', [], {'default': "'post_save'", 'max_length': '15'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailTemplate']"}),
'update_model': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.signaldeferreddispatch': {
'Meta': {'object_name': 'SignalDeferredDispatch'},
'args': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'done': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'eta': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kwargs': ('django.db.models.fields.TextField', [], {}),
'params': ('django.db.models.fields.TextField', [], {})
},
'dbmail.signallog': {
'Meta': {'object_name': 'SignalLog'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'model_pk': ('django.db.models.fields.BigIntegerField', [], {}),
'signal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.Signal']"})
}
}
complete_apps = ['dbmail'] | gpl-2.0 | 1,813,070,405,000,121,300 | 78.168 | 200 | 0.554191 | false |
ehostunreach/qemu | scripts/tracetool/format/events_h.py | 78 | 1406 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
trace/generated-events.h
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012-2014, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
def generate(events, backend):
out('/* This file is autogenerated by tracetool, do not edit. */',
'',
'#ifndef TRACE__GENERATED_EVENTS_H',
'#define TRACE__GENERATED_EVENTS_H',
'',
'#include <stdbool.h>',
'')
# event identifiers
out('typedef enum {')
for e in events:
out(' TRACE_%s,' % e.name.upper())
out(' TRACE_EVENT_COUNT',
'} TraceEventID;')
# static state
for e in events:
if 'disable' in e.properties:
enabled = 0
else:
enabled = 1
if "tcg-trans" in e.properties:
# a single define for the two "sub-events"
out('#define TRACE_%(name)s_ENABLED %(enabled)d',
name=e.original.original.name.upper(),
enabled=enabled)
out('#define TRACE_%s_ENABLED %d' % (e.name.upper(), enabled))
out('#include "trace/event-internal.h"',
'',
'#endif /* TRACE__GENERATED_EVENTS_H */')
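# Illustrative fragment of the header generate() emits for a hypothetical
# two-event input; the event names are invented and the layout is inferred
# from the out() calls above rather than copied from real tracetool output:
#
#   typedef enum {
#       TRACE_QEMU_VMALLOC,
#       TRACE_QEMU_VFREE,
#       TRACE_EVENT_COUNT
#   } TraceEventID;
#   #define TRACE_QEMU_VMALLOC_ENABLED 1
#   #define TRACE_QEMU_VFREE_ENABLED 1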
| gpl-2.0 | -8,837,645,216,981,743,000 | 26 | 76 | 0.554843 | false |
JRock007/boxxy | dist/Boxxy server.app/Contents/Resources/lib/python2.7/pygame/tests/surflock_test.py | 5 | 5483 | if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
import pygame
import sys
class SurfaceLockTest (unittest.TestCase):
def test_lock (self):
sf = pygame.Surface ((5, 5))
sf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (sf,))
sf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (sf, sf))
sf.unlock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (sf,))
sf.unlock ()
self.assertEquals (sf.get_locked (), False)
self.assertEquals (sf.get_locks (), ())
def test_subsurface_lock (self):
sf = pygame.Surface ((5, 5))
subsf = sf.subsurface ((1, 1, 2, 2))
sf2 = pygame.Surface ((5, 5))
# Simple blits, nothing should happen here.
sf2.blit (subsf, (0, 0))
sf2.blit (sf, (0, 0))
# Test blitting on self:
self.assertRaises (pygame.error, sf.blit, subsf, (0, 0))
#self.assertRaises (pygame.error, subsf.blit, sf, (0, 0))
# ^ Fails although it should not in my opinion. If I cannot
# blit the subsurface to the surface, it should not be allowed
# the other way around as well.
# Test additional locks.
sf.lock ()
sf2.blit (subsf, (0, 0))
self.assertRaises (pygame.error, sf2.blit, sf, (0, 0))
subsf.lock ()
self.assertRaises (pygame.error, sf2.blit, subsf, (0, 0))
self.assertRaises (pygame.error, sf2.blit, sf, (0, 0))
# sf and subsf are now explicitly locked. Unlock sf, so we can
# (assume) to blit it.
# It will fail though as the subsurface still has a lock around,
# which is okay and correct behaviour.
sf.unlock ()
self.assertRaises (pygame.error, sf2.blit, subsf, (0, 0))
self.assertRaises (pygame.error, sf2.blit, sf, (0, 0))
# Run a second unlock on the surface. This should ideally have
# no effect as the subsurface is the locking reason!
sf.unlock ()
self.assertRaises (pygame.error, sf2.blit, sf, (0, 0))
self.assertRaises (pygame.error, sf2.blit, subsf, (0, 0))
subsf.unlock ()
sf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (sf,))
self.assertEquals (subsf.get_locked (), False)
self.assertEquals (subsf.get_locks (), ())
subsf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (sf, subsf))
self.assertEquals (subsf.get_locked (), True)
self.assertEquals (subsf.get_locks (), (subsf,))
sf.unlock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (subsf,))
self.assertEquals (subsf.get_locked (), True)
self.assertEquals (subsf.get_locks (), (subsf,))
subsf.unlock ()
self.assertEquals (sf.get_locked (), False)
self.assertEquals (sf.get_locks (), ())
self.assertEquals (subsf.get_locked (), False)
self.assertEquals (subsf.get_locks (), ())
subsf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (subsf,))
self.assertEquals (subsf.get_locked (), True)
self.assertEquals (subsf.get_locks (), (subsf,))
subsf.lock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (subsf, subsf))
self.assertEquals (subsf.get_locked (), True)
self.assertEquals (subsf.get_locks (), (subsf, subsf))
def test_pxarray_ref (self):
sf = pygame.Surface ((5, 5))
ar = pygame.PixelArray (sf)
ar2 = pygame.PixelArray (sf)
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (ar, ar2))
del ar
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (ar2,))
ar = ar2[:]
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (ar2,))
del ar
self.assertEquals (sf.get_locked (), True)
self.assertEquals (len (sf.get_locks ()), 1)
def test_buffer (self):
sf = pygame.Surface ((5, 5))
buf = sf.get_buffer ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (buf,))
sf.unlock ()
self.assertEquals (sf.get_locked (), True)
self.assertEquals (sf.get_locks (), (buf,))
del buf
self.assertEquals (sf.get_locked (), False)
self.assertEquals (sf.get_locks (), ())
if __name__ == '__main__':
unittest.main()
| mit | 2,047,549,133,576,590,800 | 33.374194 | 72 | 0.559183 | false |
cldershem/osf.io | tests/test_utils.py | 15 | 14688 | # -*- coding: utf-8 -*-
import os
import mock
import unittest
from flask import Flask
from nose.tools import * # noqa (PEP8 asserts)
import datetime
from tests.base import OsfTestCase
from tests.factories import RegistrationFactory
from framework.routing import Rule, json_renderer
from framework.utils import secure_filename
from website.routes import process_rules, OsfWebRenderer
from website import settings
from website.util import paths
from website.util.mimetype import get_mimetype
from website.util import web_url_for, api_url_for, is_json_request, waterbutler_url_for, conjunct, api_v2_url
from website.project import utils as project_utils
try:
import magic # noqa
LIBMAGIC_AVAILABLE = True
except ImportError:
LIBMAGIC_AVAILABLE = False
HERE = os.path.dirname(os.path.abspath(__file__))
class TestUrlForHelpers(unittest.TestCase):
def setUp(self):
def dummy_view():
return {}
def dummy_guid_project_view():
return {}
def dummy_guid_profile_view():
return {}
self.app = Flask(__name__)
api_rule = Rule([
'/api/v1/<pid>/',
'/api/v1/<pid>/component/<nid>/'
], 'get', dummy_view, json_renderer)
web_rule = Rule([
'/<pid>/',
'/<pid>/component/<nid>/'
], 'get', dummy_view, OsfWebRenderer)
web_guid_project_rule = Rule([
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
], 'get', dummy_guid_project_view, OsfWebRenderer)
web_guid_profile_rule = Rule([
'/profile/<pid>/',
], 'get', dummy_guid_profile_view, OsfWebRenderer)
process_rules(self.app, [api_rule, web_rule, web_guid_project_rule, web_guid_profile_rule])
def test_api_url_for(self):
with self.app.test_request_context():
assert api_url_for('dummy_view', pid='123') == '/api/v1/123/'
def test_api_v2_url_with_port(self):
full_url = api_v2_url('/nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, "http://localhost:8000/v2/nodes/abcd3/contributors/")
        # Handles the URL the same way whether or not the user enters a leading slash
full_url = api_v2_url('nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, "http://localhost:8000/v2/nodes/abcd3/contributors/")
        # The user is still responsible for the trailing slash; if they omit it, none appears at the end of the URL
full_url = api_v2_url('/nodes/abcd3/contributors',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_not_equal(full_url, "http://localhost:8000/v2/nodes/abcd3/contributors/")
def test_api_v2_url_with_params(self):
"""Handles- and encodes- URLs with parameters (dict and kwarg) correctly"""
full_url = api_v2_url('/nodes/abcd3/contributors/',
params={'filter[fullname]': 'bob'},
base_route='https://api.osf.io/',
base_prefix='v2/',
page_size=10)
assert_equal(full_url, "https://api.osf.io/v2/nodes/abcd3/contributors/?filter%5Bfullname%5D=bob&page_size=10")
def test_api_v2_url_base_path(self):
"""Given a blank string, should return the base path (domain + port + prefix) with no extra cruft at end"""
full_url = api_v2_url('',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, "http://localhost:8000/v2/")
def test_web_url_for(self):
with self.app.test_request_context():
assert web_url_for('dummy_view', pid='123') == '/123/'
def test_web_url_for_guid(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=True))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=False))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123'))
# check /project/<pid>/node/<nid>
assert_equal('/nid321/', web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=True))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=False))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321'))
# check /profile/<pid>
assert_equal('/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=True))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=False))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123'))
def test_web_url_for_guid_regex_conditions(self):
with self.app.test_request_context():
            # The guid regex limits keys to a minimum of 5 alphanumeric characters.
# check /project/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_project_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_project_view', pid='123456', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal('/321/', web_url_for('dummy_guid_project_view', pid='123', nid='321', _guid=True))
assert_equal('/654321/', web_url_for('dummy_guid_project_view', pid='123456', nid='654321', _guid=True))
# check /profile/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_profile_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_profile_view', pid='123456', _guid=True))
def test_web_url_for_guid_case_sensitive(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/ABCdef/', web_url_for('dummy_guid_project_view', pid='ABCdef', _guid=True))
# check /project/<pid>/node/<nid>
assert_equal('/GHIjkl/', web_url_for('dummy_guid_project_view', pid='ABCdef', nid='GHIjkl', _guid=True))
# check /profile/<pid>
assert_equal('/MNOpqr/', web_url_for('dummy_guid_profile_view', pid='MNOpqr', _guid=True))
def test_web_url_for_guid_invalid_unicode(self):
with self.app.test_request_context():
            # unicode ids are not supported when encoding guid urls.
# check /project/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal(
'/ø∆≤µ©/',
web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', nid='©µ≤∆ø', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/node/%C2%A9%C2%B5%E2%89%A4%E2%88%86%C3%B8/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', nid='©µ≤∆ø', _guid=True))
# check /profile/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_profile_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/profile/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_profile_view', pid='øˆ∆≤µˆ', _guid=True))
def test_api_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = api_url_for('dummy_view', pid='123', nid='abc')
assert url == '/api/v1/123/component/abc/'
def test_web_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = web_url_for('dummy_view', pid='123', nid='abc')
assert url == '/123/component/abc/'
def test_is_json_request(self):
with self.app.test_request_context(content_type='application/json'):
assert_true(is_json_request())
with self.app.test_request_context(content_type=None):
assert_false(is_json_request())
with self.app.test_request_context(content_type='application/json;charset=UTF-8'):
assert_true(is_json_request())
def test_waterbutler_url_for(self):
with self.app.test_request_context():
url = waterbutler_url_for('upload', 'provider', 'path', mock.Mock(_id='_id'))
assert_in('nid=_id', url)
assert_in('/file?', url)
assert_in('path=path', url)
assert_in('provider=provider', url)
def test_waterbutler_url_for_implicit_cookie(self):
with self.app.test_request_context() as context:
context.request.cookies = {settings.COOKIE_NAME: 'cookie'}
url = waterbutler_url_for('upload', 'provider', 'path', mock.Mock(_id='_id'))
assert_in('nid=_id', url)
assert_in('/file?', url)
assert_in('path=path', url)
assert_in('cookie=cookie', url)
assert_in('provider=provider', url)
def test_waterbutler_url_for_cookie_not_required(self):
with self.app.test_request_context():
url = waterbutler_url_for('upload', 'provider', 'path', mock.Mock(_id='_id'))
assert_not_in('cookie', url)
assert_in('nid=_id', url)
assert_in('/file?', url)
assert_in('path=path', url)
assert_in('provider=provider', url)
class TestGetMimeTypes(unittest.TestCase):
def test_get_markdown_mimetype_from_filename(self):
name = 'test.md'
mimetype = get_mimetype(name)
assert_equal('text/x-markdown', mimetype)
@unittest.skipIf(not LIBMAGIC_AVAILABLE, 'Must have python-magic and libmagic installed')
def test_unknown_extension_with_no_contents_not_real_file_results_in_exception(self):
name = 'test.thisisnotarealextensionidonotcarwhatyousay'
with assert_raises(IOError):
get_mimetype(name)
@unittest.skipIf(LIBMAGIC_AVAILABLE, 'This test only runs if python-magic and libmagic are not installed')
def test_unknown_extension_with_no_contents_not_real_file_results_in_exception2(self):
name = 'test.thisisnotarealextensionidonotcarwhatyousay'
mime_type = get_mimetype(name)
assert_equal(None, mime_type)
@unittest.skipIf(not LIBMAGIC_AVAILABLE, 'Must have python-magic and libmagic installed')
def test_unknown_extension_with_real_file_results_in_python_mimetype(self):
name = 'test_views.notarealfileextension'
maybe_python_file = os.path.join(HERE, 'test_files', name)
mimetype = get_mimetype(maybe_python_file)
assert_equal('text/x-python', mimetype)
@unittest.skipIf(not LIBMAGIC_AVAILABLE, 'Must have python-magic and libmagic installed')
def test_unknown_extension_with_python_contents_results_in_python_mimetype(self):
name = 'test.thisisnotarealextensionidonotcarwhatyousay'
python_file = os.path.join(HERE, 'test_utils.py')
with open(python_file, 'r') as the_file:
content = the_file.read()
mimetype = get_mimetype(name, content)
assert_equal('text/x-python', mimetype)
class TestFrameworkUtils(unittest.TestCase):
def test_leading_underscores(self):
assert_equal(
'__init__.py',
secure_filename('__init__.py')
)
def test_werkzeug_cases(self):
"""Test that Werkzeug's tests still pass for our wrapped version"""
# Copied from Werkzeug
# BSD licensed - original at github.com/mitsuhiko/werkzeug,
# /tests/test_utils.py, line 282, commit 811b438
assert_equal(
'My_cool_movie.mov',
secure_filename('My cool movie.mov')
)
assert_equal(
'etc_passwd',
secure_filename('../../../etc/passwd')
)
assert_equal(
'i_contain_cool_umlauts.txt',
secure_filename(u'i contain cool \xfcml\xe4uts.txt')
)
class TestWebpackFilter(unittest.TestCase):
def setUp(self):
self.asset_paths = {'assets': 'assets.07123e.js'}
def test_resolve_asset(self):
asset = paths.webpack_asset('assets.js', self.asset_paths, debug=False)
assert_equal(asset, '/static/public/js/assets.07123e.js')
def test_resolve_asset_not_found_and_not_in_debug_mode(self):
with assert_raises(KeyError):
paths.webpack_asset('bundle.js', self.asset_paths, debug=False)
class TestWebsiteUtils(unittest.TestCase):
def test_conjunct(self):
words = []
assert_equal(conjunct(words), '')
words = ['a']
assert_equal(conjunct(words), 'a')
words = ['a', 'b']
assert_equal(conjunct(words), 'a and b')
words = ['a', 'b', 'c']
assert_equal(conjunct(words), 'a, b, and c')
assert_equal(conjunct(words, conj='or'), 'a, b, or c')
class TestProjectUtils(OsfTestCase):
def set_registered_date(self, reg, date):
reg._fields['registered_date'].__set__(
reg,
date,
safe=True
)
reg.save()
def test_get_recent_public_registrations(self):
count = 0
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = datetime.datetime.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations()]
assert_equal(len(regs), 5)
for i in range(4):
assert_true(regs[i].registered_date > regs[i + 1].registered_date)
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = datetime.datetime.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations(7)]
assert_equal(len(regs), 7)
| apache-2.0 | 8,479,762,644,867,852,000 | 42.218935 | 119 | 0.591867 | false |
akvo/akvo-rsr | akvo/rsr/management/commands/lock_unlock_periods.py | 1 | 1998 | # -*- coding: utf-8 -*-
# Akvo Reporting is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
import sys
from django.core.management.base import BaseCommand
from ...models import IndicatorPeriod, Keyword, Project
class Command(BaseCommand):
help = 'Script for locking and unlocking periods based on Keyword'
def add_arguments(self, parser):
parser.add_argument(
'action',
type=str,
choices=['lock', 'unlock'],
help='Action to take - lock or unlock'
)
parser.add_argument(
'-k',
'--keyword',
action='store',
dest='keyword',
help='Keyword to use for filtering Indicator Periods'
)
def handle(self, *args, **options):
# parse options
verbosity = int(options['verbosity'])
keyword = options.get('keyword')
action = options.get('action')
if not keyword:
print('Keyword argument is required')
sys.exit(1)
try:
keyword = Keyword.objects.get(label=keyword)
except Keyword.DoesNotExist:
print('Keyword does not exist')
sys.exit(1)
projects = Project.objects.filter(keywords__in=[keyword])
indicator_periods = IndicatorPeriod.objects.filter(indicator__result__project_id__in=projects)
count = indicator_periods.count()
if count == 0:
print('No indicator periods found to {}'.format(action))
sys.exit(0)
if verbosity > 1:
self.stdout.write('{}ing {} periods'.format(action.capitalize(), count))
locked = action == 'lock'
indicator_periods.update(locked=locked)
self.stdout.write('{}ed {} periods'.format(action.capitalize(), count))
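    # Example invocations (hypothetical keyword label; standard Django
    # management-command usage is assumed):
    #   python manage.py lock_unlock_periods lock --keyword "EUTF"
    #   python manage.py lock_unlock_periods unlock -k "EUTF" --verbosity 2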
| agpl-3.0 | 6,458,502,891,060,040,000 | 30.714286 | 102 | 0.603604 | false |
beiko-lab/gengis | bin/Lib/lib2to3/tests/data/py2_test_grammar.py | 18 | 31954 | # Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
# NOTE: When you run this test as a script from the command line, you
# get warnings about certain hex/oct constants. Since those are
# issued by the parser, you can't suppress them by adding a
# filterwarnings() call to this module. Therefore, to shut up the
# regression test, the filterwarnings() call has been added to
# regrtest.py.
from test.test_support import run_unittest, check_syntax_error
import unittest
import sys
# testing import *
from sys import *
class TokenTests(unittest.TestCase):
def testBackslash(self):
# Backslash means line continuation:
x = 1 \
+ 1
self.assertEquals(x, 2, 'backslash for line continuation')
        # Backslash does not mean continuation in comments :\
x = 0
self.assertEquals(x, 0, 'backslash ending comment')
def testPlainIntegers(self):
self.assertEquals(0xff, 255)
self.assertEquals(0377, 255)
self.assertEquals(2147483647, 017777777777)
# "0x" is not a valid literal
self.assertRaises(SyntaxError, eval, "0x")
from sys import maxint
if maxint == 2147483647:
self.assertEquals(-2147483647-1, -020000000000)
# XXX -2147483648
self.assert_(037777777777 > 0)
self.assert_(0xffffffff > 0)
for s in '2147483648', '040000000000', '0x100000000':
try:
x = eval(s)
except OverflowError:
self.fail("OverflowError on huge integer literal %r" % s)
elif maxint == 9223372036854775807:
self.assertEquals(-9223372036854775807-1, -01000000000000000000000)
self.assert_(01777777777777777777777 > 0)
self.assert_(0xffffffffffffffff > 0)
for s in '9223372036854775808', '02000000000000000000000', \
'0x10000000000000000':
try:
x = eval(s)
except OverflowError:
self.fail("OverflowError on huge integer literal %r" % s)
else:
self.fail('Weird maxint value %r' % maxint)
def testLongIntegers(self):
x = 0L
x = 0l
x = 0xffffffffffffffffL
x = 0xffffffffffffffffl
x = 077777777777777777L
x = 077777777777777777l
x = 123456789012345678901234567890L
x = 123456789012345678901234567890l
def testFloats(self):
x = 3.14
x = 314.
x = 0.314
# XXX x = 000.314
x = .314
x = 3e14
x = 3E14
x = 3e-14
x = 3e+14
x = 3.e14
x = .3e14
x = 3.1e4
def testStringLiterals(self):
x = ''; y = ""; self.assert_(len(x) == 0 and x == y)
x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39)
x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34)
x = "doesn't \"shrink\" does it"
y = 'doesn\'t "shrink" does it'
self.assert_(len(x) == 24 and x == y)
x = "does \"shrink\" doesn't it"
y = 'does "shrink" doesn\'t it'
self.assert_(len(x) == 24 and x == y)
x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
self.assertEquals(x, y)
y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
'''
self.assertEquals(x, y)
y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
"
self.assertEquals(x, y)
y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
'
self.assertEquals(x, y)
class GrammarTests(unittest.TestCase):
# single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
# XXX can't test in a script -- this rule is only used when interactive
# file_input: (NEWLINE | stmt)* ENDMARKER
# Being tested as this very moment this very module
# expr_input: testlist NEWLINE
# XXX Hard to test -- used only in calls to input()
def testEvalInput(self):
# testlist ENDMARKER
x = eval('1, 0 or 1')
def testFuncdef(self):
### 'def' NAME parameters ':' suite
### parameters: '(' [varargslist] ')'
### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME]
### | ('**'|'*' '*') NAME)
### | fpdef ['=' test] (',' fpdef ['=' test])* [',']
### fpdef: NAME | '(' fplist ')'
### fplist: fpdef (',' fpdef)* [',']
        ### arglist: (argument ',')* (argument | '*' test [',' '**' test] | '**' test)
### argument: [test '='] test # Really [keyword '='] test
def f1(): pass
f1()
f1(*())
f1(*(), **{})
def f2(one_argument): pass
def f3(two, arguments): pass
def f4(two, (compound, (argument, list))): pass
def f5((compound, first), two): pass
self.assertEquals(f2.func_code.co_varnames, ('one_argument',))
self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments'))
if sys.platform.startswith('java'):
self.assertEquals(f4.func_code.co_varnames,
('two', '(compound, (argument, list))', 'compound', 'argument',
'list',))
self.assertEquals(f5.func_code.co_varnames,
('(compound, first)', 'two', 'compound', 'first'))
else:
self.assertEquals(f4.func_code.co_varnames,
('two', '.1', 'compound', 'argument', 'list'))
self.assertEquals(f5.func_code.co_varnames,
('.0', 'two', 'compound', 'first'))
def a1(one_arg,): pass
def a2(two, args,): pass
def v0(*rest): pass
def v1(a, *rest): pass
def v2(a, b, *rest): pass
def v3(a, (b, c), *rest): return a, b, c, rest
f1()
f2(1)
f2(1,)
f3(1, 2)
f3(1, 2,)
f4(1, (2, (3, 4)))
v0()
v0(1)
v0(1,)
v0(1,2)
v0(1,2,3,4,5,6,7,8,9,0)
v1(1)
v1(1,)
v1(1,2)
v1(1,2,3)
v1(1,2,3,4,5,6,7,8,9,0)
v2(1,2)
v2(1,2,3)
v2(1,2,3,4)
v2(1,2,3,4,5,6,7,8,9,0)
v3(1,(2,3))
v3(1,(2,3),4)
v3(1,(2,3),4,5,6,7,8,9,0)
# ceval unpacks the formal arguments into the first argcount names;
# thus, the names nested inside tuples must appear after these names.
if sys.platform.startswith('java'):
self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c'))
else:
self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c'))
self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,)))
def d01(a=1): pass
d01()
d01(1)
d01(*(1,))
d01(**{'a':2})
def d11(a, b=1): pass
d11(1)
d11(1, 2)
d11(1, **{'b':2})
def d21(a, b, c=1): pass
d21(1, 2)
d21(1, 2, 3)
d21(*(1, 2, 3))
d21(1, *(2, 3))
d21(1, 2, *(3,))
d21(1, 2, **{'c':3})
def d02(a=1, b=2): pass
d02()
d02(1)
d02(1, 2)
d02(*(1, 2))
d02(1, *(2,))
d02(1, **{'b':2})
d02(**{'a': 1, 'b': 2})
def d12(a, b=1, c=2): pass
d12(1)
d12(1, 2)
d12(1, 2, 3)
def d22(a, b, c=1, d=2): pass
d22(1, 2)
d22(1, 2, 3)
d22(1, 2, 3, 4)
def d01v(a=1, *rest): pass
d01v()
d01v(1)
d01v(1, 2)
d01v(*(1, 2, 3, 4))
d01v(*(1,))
d01v(**{'a':2})
def d11v(a, b=1, *rest): pass
d11v(1)
d11v(1, 2)
d11v(1, 2, 3)
def d21v(a, b, c=1, *rest): pass
d21v(1, 2)
d21v(1, 2, 3)
d21v(1, 2, 3, 4)
d21v(*(1, 2, 3, 4))
d21v(1, 2, **{'c': 3})
def d02v(a=1, b=2, *rest): pass
d02v()
d02v(1)
d02v(1, 2)
d02v(1, 2, 3)
d02v(1, *(2, 3, 4))
d02v(**{'a': 1, 'b': 2})
def d12v(a, b=1, c=2, *rest): pass
d12v(1)
d12v(1, 2)
d12v(1, 2, 3)
d12v(1, 2, 3, 4)
d12v(*(1, 2, 3, 4))
d12v(1, 2, *(3, 4, 5))
d12v(1, *(2,), **{'c': 3})
def d22v(a, b, c=1, d=2, *rest): pass
d22v(1, 2)
d22v(1, 2, 3)
d22v(1, 2, 3, 4)
d22v(1, 2, 3, 4, 5)
d22v(*(1, 2, 3, 4))
d22v(1, 2, *(3, 4, 5))
d22v(1, *(2, 3), **{'d': 4})
def d31v((x)): pass
d31v(1)
def d32v((x,)): pass
d32v((1,))
# keyword arguments after *arglist
def f(*args, **kwargs):
return args, kwargs
self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4),
{'x':2, 'y':5}))
self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)")
self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)")
# Check ast errors in *args and *kwargs
check_syntax_error(self, "f(*g(1=2))")
check_syntax_error(self, "f(**g(1=2))")
def testLambdef(self):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEquals(l1(), 0)
l2 = lambda : a[d] # XXX just testing the expression
l3 = lambda : [2 < x for x in [-1, 3, 0L]]
self.assertEquals(l3(), [0, 1, 0])
l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
self.assertEquals(l4(), 1)
l5 = lambda x, y, z=2: x + y + z
self.assertEquals(l5(1, 2), 5)
self.assertEquals(l5(1, 2, 3), 6)
check_syntax_error(self, "lambda x: x = 2")
check_syntax_error(self, "lambda (None,): None")
### stmt: simple_stmt | compound_stmt
# Tested below
def testSimpleStmt(self):
### simple_stmt: small_stmt (';' small_stmt)* [';']
x = 1; pass; del x
def foo():
# verify statements that end with semi-colons
x = 1; pass; del x;
foo()
### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt
# Tested below
def testExprStmt(self):
# (exprlist '=')* exprlist
1
1, 2, 3
x = 1
x = 1, 2, 3
x = y = z = 1, 2, 3
x, y, z = 1, 2, 3
abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4)
check_syntax_error(self, "x + 1 = 1")
check_syntax_error(self, "a + 1 = b + 2")
def testPrintStmt(self):
# 'print' (test ',')* [test]
import StringIO
# Can't test printing to real stdout without comparing output
# which is not available in unittest.
save_stdout = sys.stdout
sys.stdout = StringIO.StringIO()
print 1, 2, 3
print 1, 2, 3,
print
print 0 or 1, 0 or 1,
print 0 or 1
# 'print' '>>' test ','
print >> sys.stdout, 1, 2, 3
print >> sys.stdout, 1, 2, 3,
print >> sys.stdout
print >> sys.stdout, 0 or 1, 0 or 1,
print >> sys.stdout, 0 or 1
# test printing to an instance
class Gulp:
def write(self, msg): pass
gulp = Gulp()
print >> gulp, 1, 2, 3
print >> gulp, 1, 2, 3,
print >> gulp
print >> gulp, 0 or 1, 0 or 1,
print >> gulp, 0 or 1
# test print >> None
def driver():
oldstdout = sys.stdout
sys.stdout = Gulp()
try:
tellme(Gulp())
tellme()
finally:
sys.stdout = oldstdout
# we should see this once
def tellme(file=sys.stdout):
print >> file, 'hello world'
driver()
# we should not see this at all
def tellme(file=None):
print >> file, 'goodbye universe'
driver()
self.assertEqual(sys.stdout.getvalue(), '''\
1 2 3
1 2 3
1 1 1
1 2 3
1 2 3
1 1 1
hello world
''')
sys.stdout = save_stdout
# syntax errors
check_syntax_error(self, 'print ,')
check_syntax_error(self, 'print >> x,')
def testDelStmt(self):
# 'del' exprlist
abc = [1,2,3]
x, y, z = abc
xyz = x, y, z
del abc
del x, y, (z, xyz)
def testPassStmt(self):
# 'pass'
pass
# flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
# Tested below
def testBreakStmt(self):
# 'break'
while 1: break
def testContinueStmt(self):
# 'continue'
i = 1
while i: i = 0; continue
msg = ""
while not msg:
msg = "ok"
try:
continue
msg = "continue failed to continue inside try"
except:
msg = "continue inside try called except block"
if msg != "ok":
self.fail(msg)
msg = ""
while not msg:
msg = "finally block not called"
try:
continue
finally:
msg = "ok"
if msg != "ok":
self.fail(msg)
def test_break_continue_loop(self):
# This test warrants an explanation. It is a test specifically for SF bugs
# #463359 and #462937. The bug is that a 'break' statement executed or
# exception raised inside a try/except inside a loop, *after* a continue
# statement has been executed in that loop, will cause the wrong number of
# arguments to be popped off the stack and the instruction pointer reset to
# a very small number (usually 0.) Because of this, the following test
# *must* be written as a function, and the tracking vars *must* be function
# arguments with default values. Otherwise, the test will loop and loop.
def test_inner(extra_burning_oil = 1, count=0):
big_hippo = 2
while big_hippo:
count += 1
try:
if extra_burning_oil and big_hippo == 1:
extra_burning_oil -= 1
break
big_hippo -= 1
continue
except:
raise
if count > 2 or big_hippo <> 1:
self.fail("continue then break in try/except in loop broken!")
test_inner()
def testReturn(self):
# 'return' [testlist]
def g1(): return
def g2(): return 1
g1()
x = g2()
check_syntax_error(self, "class foo:return 1")
def testYield(self):
check_syntax_error(self, "class foo:yield 1")
def testRaise(self):
# 'raise' test [',' test]
try: raise RuntimeError, 'just testing'
except RuntimeError: pass
try: raise KeyboardInterrupt
except KeyboardInterrupt: pass
def testImport(self):
# 'import' dotted_as_names
import sys
import time, sys
# 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names)
from time import time
from time import (time)
# not testable inside a function, but already done at top of the module
# from sys import *
from sys import path, argv
from sys import (path, argv)
from sys import (path, argv,)
def testGlobal(self):
# 'global' NAME (',' NAME)*
global a
global a, b
global one, two, three, four, five, six, seven, eight, nine, ten
def testExec(self):
# 'exec' expr ['in' expr [',' expr]]
z = None
del z
exec 'z=1+1\n'
if z != 2: self.fail('exec \'z=1+1\'\\n')
del z
exec 'z=1+1'
if z != 2: self.fail('exec \'z=1+1\'')
z = None
del z
import types
if hasattr(types, "UnicodeType"):
exec r"""if 1:
exec u'z=1+1\n'
if z != 2: self.fail('exec u\'z=1+1\'\\n')
del z
exec u'z=1+1'
if z != 2: self.fail('exec u\'z=1+1\'')"""
g = {}
exec 'z = 1' in g
if g.has_key('__builtins__'): del g['__builtins__']
if g != {'z': 1}: self.fail('exec \'z = 1\' in g')
g = {}
l = {}
import warnings
warnings.filterwarnings("ignore", "global statement", module="<string>")
exec 'global a; a = 1; b = 2' in g, l
if g.has_key('__builtins__'): del g['__builtins__']
if l.has_key('__builtins__'): del l['__builtins__']
if (g, l) != ({'a':1}, {'b':2}):
self.fail('exec ... in g (%s), l (%s)' %(g,l))
def testAssert(self):
# assert_stmt: 'assert' test [',' test]
assert 1
assert 1, 1
assert lambda x:x
assert 1, lambda x:x+1
try:
assert 0, "msg"
except AssertionError, e:
self.assertEquals(e.args[0], "msg")
else:
if __debug__:
self.fail("AssertionError not raised by assert 0")
### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
# Tested below
def testIf(self):
# 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
if 1: pass
if 1: pass
else: pass
if 0: pass
elif 0: pass
if 0: pass
elif 0: pass
elif 0: pass
elif 0: pass
else: pass
def testWhile(self):
# 'while' test ':' suite ['else' ':' suite]
while 0: pass
while 0: pass
else: pass
# Issue1920: "while 0" is optimized away,
# ensure that the "else" clause is still present.
x = 0
while 0:
x = 1
else:
x = 2
self.assertEquals(x, 2)
def testFor(self):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
for i in 1, 2, 3: pass
for i, j, k in (): pass
else: pass
class Squares:
def __init__(self, max):
self.max = max
self.sofar = []
def __len__(self): return len(self.sofar)
def __getitem__(self, i):
if not 0 <= i < self.max: raise IndexError
n = len(self.sofar)
while n <= i:
self.sofar.append(n*n)
n = n+1
return self.sofar[i]
n = 0
for x in Squares(10): n = n+x
if n != 285:
self.fail('for over growing sequence')
result = []
for x, in [(1,), (2,), (3,)]:
result.append(x)
self.assertEqual(result, [1, 2, 3])
def testTry(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
### except_clause: 'except' [expr [('as' | ',') expr]]
try:
1/0
except ZeroDivisionError:
pass
else:
pass
try: 1/0
except EOFError: pass
except TypeError as msg: pass
except RuntimeError, msg: pass
except: pass
else: pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError), msg: pass
try: pass
finally: pass
def testSuite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if 1: pass
if 1:
pass
if 1:
#
#
#
pass
pass
#
pass
#
def testTest(self):
### and_test ('or' and_test)*
### and_test: not_test ('and' not_test)*
### not_test: 'not' not_test | comparison
if not 1: pass
if 1 and 1: pass
if 1 or 1: pass
if not not not 1: pass
if not 1 and 1 and 1: pass
if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
def testComparison(self):
### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass
x = (1 == 1)
if 1 == 1: pass
if 1 != 1: pass
if 1 <> 1: pass
if 1 < 1: pass
if 1 > 1: pass
if 1 <= 1: pass
if 1 >= 1: pass
if 1 is 1: pass
if 1 is not 1: pass
if 1 in (): pass
if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass
def testBinaryMaskOps(self):
x = 1 & 1
x = 1 ^ 1
x = 1 | 1
def testShiftOps(self):
x = 1 << 1
x = 1 >> 1
x = 1 << 1 >> 1
def testAdditiveOps(self):
x = 1
x = 1 + 1
x = 1 - 1 - 1
x = 1 - 1 + 1 - 1 + 1
def testMultiplicativeOps(self):
x = 1 * 1
x = 1 / 1
x = 1 % 1
x = 1 / 1 * 1 % 1
def testUnaryOps(self):
x = +1
x = -1
x = ~1
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1
def testSelectors(self):
### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
### subscript: expr | [expr] ':' [expr]
import sys, time
c = sys.path[0]
x = time.time()
x = sys.modules['time'].time()
a = '01234'
c = a[0]
c = a[-1]
s = a[0:5]
s = a[:5]
s = a[0:]
s = a[:]
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
# A rough test of SF bug 1333982. http://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
d[1] = 1
d[1,] = 2
d[1,2] = 3
d[1,2,3] = 4
L = list(d)
L.sort()
self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
def testAtoms(self):
### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING
### dictmaker: test ':' test (',' test ':' test)* [',']
x = (1)
x = (1 or 2 or 3)
x = (1 or 2 or 3, 2, 3)
x = []
x = [1]
x = [1 or 2 or 3]
x = [1 or 2 or 3, 2, 3]
x = []
x = {}
x = {'one': 1}
x = {'one': 1,}
x = {'one' or 'two': 1 or 2}
x = {'one': 1, 'two': 2}
x = {'one': 1, 'two': 2,}
x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}
x = `x`
x = `1 or 2 or 3`
self.assertEqual(`1,2`, '(1, 2)')
x = x
x = 'x'
x = 123
### exprlist: expr (',' expr)* [',']
### testlist: test (',' test)* [',']
# These have been exercised enough above
def testClassdef(self):
# 'class' NAME ['(' [testlist] ')'] ':' suite
class B: pass
class B2(): pass
class C1(B): pass
class C2(B): pass
class D(C1, C2, B): pass
class C:
def meth1(self): pass
def meth2(self, arg): pass
def meth3(self, a1, a2): pass
# decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
# decorators: decorator+
# decorated: decorators (classdef | funcdef)
def class_decorator(x):
x.decorated = True
return x
@class_decorator
class G:
pass
self.assertEqual(G.decorated, True)
def testListcomps(self):
# list comprehension tests
nums = [1, 2, 3, 4, 5]
strs = ["Apple", "Banana", "Coconut"]
spcs = [" Apple", " Banana ", "Coco nut "]
self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut'])
self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15])
self.assertEqual([x for x in nums if x > 2], [3, 4, 5])
self.assertEqual([(i, s) for i in nums for s in strs],
[(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'),
(2, 'Apple'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Apple'), (3, 'Banana'), (3, 'Coconut'),
(4, 'Apple'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Apple'), (5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]],
[(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)],
[[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]])
def test_in_func(l):
return [None < x < 3 for x in l if x > 2]
self.assertEqual(test_in_func(nums), [False, False, False])
def test_nested_front():
self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]],
[[1, 2], [3, 4], [5, 6]])
test_nested_front()
check_syntax_error(self, "[i, s for i in nums for s in strs]")
check_syntax_error(self, "[x if y]")
suppliers = [
(1, "Boeing"),
(2, "Ford"),
(3, "Macdonalds")
]
parts = [
(10, "Airliner"),
(20, "Engine"),
(30, "Cheeseburger")
]
suppart = [
(1, 10), (1, 20), (2, 20), (3, 30)
]
x = [
(sname, pname)
for (sno, sname) in suppliers
for (pno, pname) in parts
for (sp_sno, sp_pno) in suppart
if sno == sp_sno and pno == sp_pno
]
self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
('Macdonalds', 'Cheeseburger')])
def testGenexps(self):
# generator expression tests
g = ([x for x in range(10)] for x in range(1))
self.assertEqual(g.next(), [x for x in range(10)])
try:
g.next()
self.fail('should produce StopIteration exception')
except StopIteration:
pass
a = 1
try:
g = (a for d in a)
g.next()
self.fail('should produce TypeError')
except TypeError:
pass
self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd'])
self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy'])
a = [x for x in range(10)]
b = (x for x in (y for y in a))
self.assertEqual(sum(b), sum([x for x in range(10)]))
self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0)
check_syntax_error(self, "foo(x for x in range(10), 100)")
check_syntax_error(self, "foo(100, x for x in range(10))")
def testComprehensionSpecials(self):
# test for outermost iterable precomputation
x = 10; g = (i for i in range(x)); x = 5
self.assertEqual(len(list(g)), 10)
# This should hold, since we're only precomputing the outermost iterable.
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x))
x = 5; t = True;
self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g))
# Grammar allows multiple adjacent 'if's in listcomps and genexps,
# even though it's silly. Make sure it works (ifelse broke this.)
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7])
# verify unpacking single element tuples in listcomp/genexp.
self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6])
self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9])
def test_with_statement(self):
class manager(object):
def __enter__(self):
return (1, 2)
def __exit__(self, *args):
pass
with manager():
pass
with manager() as x:
pass
with manager() as (x, y):
pass
with manager(), manager():
pass
with manager() as x, manager() as y:
pass
with manager() as x, manager():
pass
def testIfElseExpr(self):
# Test ifelse expressions in various cases
def _checkeval(msg, ret):
"helper to check that evaluation of expressions is done correctly"
print x
return ret
self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True])
self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True])
self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True])
self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5)
self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5)
self.assertEqual((5 and 6 if 0 else 1), 1)
self.assertEqual(((5 and 6) if 0 else 1), 1)
self.assertEqual((5 and (6 if 1 else 1)), 6)
self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3)
self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1)
self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5)
self.assertEqual((not 5 if 1 else 1), False)
self.assertEqual((not 5 if 0 else 1), 1)
self.assertEqual((6 + 1 if 1 else 2), 7)
self.assertEqual((6 - 1 if 1 else 2), 5)
self.assertEqual((6 * 2 if 1 else 4), 12)
self.assertEqual((6 / 2 if 1 else 3), 3)
self.assertEqual((6 < 4 if 0 else 2), 2)
def test_main():
run_unittest(TokenTests, GrammarTests)
if __name__ == '__main__':
test_main()
| gpl-3.0 | -4,638,273,122,655,081,000 | 30.806982 | 132 | 0.460224 | false |
thaim/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_file_operation.py | 23 | 20590 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Stéphane Travassac <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_file_operation
short_description: Files operation in a VMware guest operating system without network
description:
- Module to copy a file to a VM, fetch a file from a VM and create or delete a directory in the guest OS.
version_added: "2.5"
author:
- Stéphane Travassac (@stravassac)
notes:
- Tested on vSphere 6
- Only the first match against vm_id is used, even if there are multiple matches
requirements:
- "python >= 2.6"
- PyVmomi
- requests
options:
datacenter:
description:
- The datacenter hosting the virtual machine.
- If set, it will help to speed up virtual machine search.
type: str
cluster:
description:
- The cluster hosting the virtual machine.
- If set, it will help to speed up virtual machine search.
type: str
folder:
description:
- Destination folder, absolute path to find an existing guest or create the new guest.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter
- Used only if C(vm_id_type) is C(inventory_path).
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
- ' folder: vm/folder2'
- ' folder: folder2'
type: str
vm_id:
description:
- Name of the virtual machine to work with.
required: True
type: str
vm_id_type:
description:
- The VMware identification method by which the virtual machine will be identified.
default: vm_name
choices:
- 'uuid'
- 'instance_uuid'
- 'dns_name'
- 'inventory_path'
- 'vm_name'
type: str
vm_username:
description:
- The user to log in to the virtual machine.
required: True
type: str
vm_password:
description:
- The password used to log in to the virtual machine.
required: True
type: str
directory:
description:
- Create or delete a directory.
- Can be used to create a temporary directory inside the guest using the mktemp operation.
- mktemp sets the variable C(dir) in the result to the name of the new directory.
- The mktemp operation was added in version 2.8.
- 'Valid attributes are:'
- ' operation (str): Valid values are: create, delete, mktemp'
- ' path (str): directory path (required for create or remove)'
- ' prefix (str): temporary directory prefix (required for mktemp)'
- ' suffix (str): temporary directory suffix (required for mktemp)'
- ' recurse (boolean): Not required, default (false)'
required: False
type: dict
copy:
description:
- Copy file to vm without requiring network.
- 'Valid attributes are:'
- ' src: absolute or relative path of the source file'
- ' dest: file destination; the destination path must exist'
- ' overwrite: False or True (not required, default False)'
required: False
type: dict
fetch:
description:
- Get file from virtual machine without requiring network.
- 'Valid attributes are:'
- ' src: The file on the remote system to fetch. This I(must) be a file, not a directory'
- ' dest: file destination on localhost; the destination path must exist'
required: False
type: dict
version_added: 2.5
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Create directory inside a vm
vmware_guest_file_operation:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
validate_certs: no
vm_id: "{{ guest_name }}"
vm_username: "{{ guest_username }}"
vm_password: "{{ guest_userpassword }}"
directory:
path: "/test"
operation: create
recurse: no
delegate_to: localhost
- name: copy file to vm
vmware_guest_file_operation:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
vm_id: "{{ guest_name }}"
vm_username: "{{ guest_username }}"
vm_password: "{{ guest_userpassword }}"
copy:
src: "files/test.zip"
dest: "/root/test.zip"
overwrite: False
delegate_to: localhost
- name: fetch file from vm
vmware_guest_file_operation:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
vm_id: "{{ guest_name }}"
vm_username: "{{ guest_username }}"
vm_password: "{{ guest_userpassword }}"
fetch:
src: "/root/test.zip"
dest: "files/test.zip"
delegate_to: localhost
'''
RETURN = r'''
'''
try:
from pyVmomi import vim, vmodl
except ImportError:
pass
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils import urls
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.vmware import (PyVmomi, find_cluster_by_name, find_datacenter_by_name,
find_vm_by_id, vmware_argument_spec)
class VmwareGuestFileManager(PyVmomi):
def __init__(self, module):
super(VmwareGuestFileManager, self).__init__(module)
datacenter_name = module.params['datacenter']
cluster_name = module.params['cluster']
folder = module.params['folder']
datacenter = None
if datacenter_name:
datacenter = find_datacenter_by_name(self.content, datacenter_name)
if not datacenter:
module.fail_json(msg="Unable to find %(datacenter)s datacenter" % module.params)
cluster = None
if cluster_name:
cluster = find_cluster_by_name(self.content, cluster_name, datacenter)
if not cluster:
module.fail_json(msg="Unable to find %(cluster)s cluster" % module.params)
if module.params['vm_id_type'] == 'inventory_path':
vm = find_vm_by_id(self.content, vm_id=module.params['vm_id'], vm_id_type="inventory_path", folder=folder)
else:
vm = find_vm_by_id(self.content,
vm_id=module.params['vm_id'],
vm_id_type=module.params['vm_id_type'],
datacenter=datacenter,
cluster=cluster)
if not vm:
module.fail_json(msg='Unable to find virtual machine.')
self.vm = vm
try:
result = dict(changed=False)
if module.params['directory']:
result = self.directory()
if module.params['copy']:
result = self.copy()
if module.params['fetch']:
result = self.fetch()
module.exit_json(**result)
except vmodl.RuntimeFault as runtime_fault:
module.fail_json(msg=to_native(runtime_fault.msg))
except vmodl.MethodFault as method_fault:
module.fail_json(msg=to_native(method_fault.msg))
except Exception as e:
module.fail_json(msg=to_native(e))
def directory(self):
result = dict(changed=True, uuid=self.vm.summary.config.uuid)
vm_username = self.module.params['vm_username']
vm_password = self.module.params['vm_password']
recurse = bool(self.module.params['directory']['recurse'])
operation = self.module.params['directory']['operation']
path = self.module.params['directory']['path']
prefix = self.module.params['directory']['prefix']
suffix = self.module.params['directory']['suffix']
creds = vim.vm.guest.NamePasswordAuthentication(username=vm_username, password=vm_password)
file_manager = self.content.guestOperationsManager.fileManager
if operation in ("create", "mktemp"):
try:
if operation == "create":
file_manager.MakeDirectoryInGuest(vm=self.vm,
auth=creds,
directoryPath=path,
createParentDirectories=recurse)
else:
newdir = file_manager.CreateTemporaryDirectoryInGuest(vm=self.vm, auth=creds,
prefix=prefix, suffix=suffix)
result['dir'] = newdir
except vim.fault.FileAlreadyExists as file_already_exists:
result['changed'] = False
result['msg'] = "Guest directory %s already exist: %s" % (path,
to_native(file_already_exists.msg))
except vim.fault.GuestPermissionDenied as permission_denied:
self.module.fail_json(msg="Permission denied for path %s : %s" % (path,
to_native(permission_denied.msg)),
uuid=self.vm.summary.config.uuid)
except vim.fault.InvalidGuestLogin as invalid_guest_login:
self.module.fail_json(msg="Invalid guest login for user %s : %s" % (vm_username,
to_native(invalid_guest_login.msg)),
uuid=self.vm.summary.config.uuid)
# other exceptions
except Exception as e:
self.module.fail_json(msg="Failed to Create directory into VM VMware exception : %s" % to_native(e),
uuid=self.vm.summary.config.uuid)
if operation == "delete":
try:
file_manager.DeleteDirectoryInGuest(vm=self.vm, auth=creds, directoryPath=path,
recursive=recurse)
except vim.fault.FileNotFound as file_not_found:
result['changed'] = False
result['msg'] = "Guest directory %s not exists %s" % (path,
to_native(file_not_found.msg))
except vim.fault.FileFault as e:
self.module.fail_json(msg="FileFault : %s" % e.msg,
uuid=self.vm.summary.config.uuid)
except vim.fault.GuestPermissionDenied as permission_denied:
self.module.fail_json(msg="Permission denied for path %s : %s" % (path,
to_native(permission_denied.msg)),
uuid=self.vm.summary.config.uuid)
except vim.fault.InvalidGuestLogin as invalid_guest_login:
self.module.fail_json(msg="Invalid guest login for user %s : %s" % (vm_username,
to_native(invalid_guest_login.msg)),
uuid=self.vm.summary.config.uuid)
# other exceptions
except Exception as e:
self.module.fail_json(msg="Failed to Delete directory into Vm VMware exception : %s" % to_native(e),
uuid=self.vm.summary.config.uuid)
return result
def fetch(self):
result = dict(changed=True, uuid=self.vm.summary.config.uuid)
vm_username = self.module.params['vm_username']
vm_password = self.module.params['vm_password']
hostname = self.module.params['hostname']
dest = self.module.params["fetch"]['dest']
src = self.module.params['fetch']['src']
creds = vim.vm.guest.NamePasswordAuthentication(username=vm_username, password=vm_password)
file_manager = self.content.guestOperationsManager.fileManager
try:
fileTransferInfo = file_manager.InitiateFileTransferFromGuest(vm=self.vm, auth=creds,
guestFilePath=src)
url = fileTransferInfo.url
url = url.replace("*", hostname)
resp, info = urls.fetch_url(self.module, url, method="GET")
try:
with open(dest, "wb") as local_file:
local_file.write(resp.read())
except Exception as e:
self.module.fail_json(msg="local file write exception : %s" % to_native(e),
uuid=self.vm.summary.config.uuid)
except vim.fault.FileNotFound as file_not_found:
self.module.fail_json(msg="Guest file %s does not exist : %s" % (src, to_native(file_not_found.msg)),
uuid=self.vm.summary.config.uuid)
except vim.fault.FileFault as e:
self.module.fail_json(msg="FileFault : %s" % to_native(e.msg),
uuid=self.vm.summary.config.uuid)
except vim.fault.GuestPermissionDenied:
self.module.fail_json(msg="Permission denied to fetch file %s" % src,
uuid=self.vm.summary.config.uuid)
except vim.fault.InvalidGuestLogin:
self.module.fail_json(msg="Invalid guest login for user %s" % vm_username,
uuid=self.vm.summary.config.uuid)
# other exceptions
except Exception as e:
self.module.fail_json(msg="Failed to Fetch file from Vm VMware exception : %s" % to_native(e),
uuid=self.vm.summary.config.uuid)
return result
def copy(self):
result = dict(changed=True, uuid=self.vm.summary.config.uuid)
vm_username = self.module.params['vm_username']
vm_password = self.module.params['vm_password']
hostname = self.module.params['hostname']
overwrite = self.module.params["copy"]["overwrite"]
dest = self.module.params["copy"]['dest']
src = self.module.params['copy']['src']
b_src = to_bytes(src, errors='surrogate_or_strict')
if not os.path.exists(b_src):
self.module.fail_json(msg="Source %s not found" % src)
if not os.access(b_src, os.R_OK):
self.module.fail_json(msg="Source %s not readable" % src)
if os.path.isdir(b_src):
self.module.fail_json(msg="copy does not support copy of directory: %s" % src)
data = None
with open(b_src, "rb") as local_file:
data = local_file.read()
file_size = os.path.getsize(b_src)
creds = vim.vm.guest.NamePasswordAuthentication(username=vm_username, password=vm_password)
file_attributes = vim.vm.guest.FileManager.FileAttributes()
file_manager = self.content.guestOperationsManager.fileManager
try:
url = file_manager.InitiateFileTransferToGuest(vm=self.vm, auth=creds, guestFilePath=dest,
fileAttributes=file_attributes, overwrite=overwrite,
fileSize=file_size)
url = url.replace("*", hostname)
resp, info = urls.fetch_url(self.module, url, data=data, method="PUT")
status_code = info["status"]
if status_code != 200:
self.module.fail_json(msg='problem during file transfer, http message:%s' % info,
uuid=self.vm.summary.config.uuid)
except vim.fault.FileAlreadyExists:
result['changed'] = False
result['msg'] = "Guest file %s already exists" % dest
return result
except vim.fault.FileFault as e:
self.module.fail_json(msg="FileFault:%s" % to_native(e.msg),
uuid=self.vm.summary.config.uuid)
except vim.fault.GuestPermissionDenied as permission_denied:
self.module.fail_json(msg="Permission denied to copy file into "
"destination %s : %s" % (dest, to_native(permission_denied.msg)),
uuid=self.vm.summary.config.uuid)
except vim.fault.InvalidGuestLogin as invalid_guest_login:
self.module.fail_json(msg="Invalid guest login for user"
" %s : %s" % (vm_username, to_native(invalid_guest_login.msg)))
# other exceptions
except Exception as e:
self.module.fail_json(msg="Failed to Copy file to Vm VMware exception : %s" % to_native(e),
uuid=self.vm.summary.config.uuid)
return result
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(
datacenter=dict(type='str'),
cluster=dict(type='str'),
folder=dict(type='str'),
vm_id=dict(type='str', required=True),
vm_id_type=dict(
default='vm_name',
type='str',
choices=['inventory_path', 'uuid', 'instance_uuid', 'dns_name', 'vm_name']),
vm_username=dict(type='str', required=True),
vm_password=dict(type='str', no_log=True, required=True),
directory=dict(
type='dict',
default=None,
options=dict(
operation=dict(required=True, type='str', choices=['create', 'delete', 'mktemp']),
path=dict(required=False, type='str'),
prefix=dict(required=False, type='str'),
suffix=dict(required=False, type='str'),
recurse=dict(required=False, type='bool', default=False)
)
),
copy=dict(
type='dict',
default=None,
options=dict(src=dict(required=True, type='str'),
dest=dict(required=True, type='str'),
overwrite=dict(required=False, type='bool', default=False)
)
),
fetch=dict(
type='dict',
default=None,
options=dict(
src=dict(required=True, type='str'),
dest=dict(required=True, type='str'),
)
)
)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
required_if=[['vm_id_type', 'inventory_path', ['folder']]],
mutually_exclusive=[['directory', 'copy', 'fetch']],
required_one_of=[['directory', 'copy', 'fetch']],
)
if module.params['directory']:
if module.params['directory']['operation'] in ('create', 'delete') and not module.params['directory']['path']:
module.fail_json(msg='directory.path is required when operation is "create" or "delete"')
if module.params['directory']['operation'] == 'mktemp' and not (module.params['directory']['prefix'] and module.params['directory']['suffix']):
module.fail_json(msg='directory.prefix and directory.suffix are required when operation is "mktemp"')
if module.params['vm_id_type'] == 'inventory_path' and not module.params['folder']:
module.fail_json(msg='Folder is required parameter when vm_id_type is inventory_path')
vmware_guest_file_manager = VmwareGuestFileManager(module)
if __name__ == '__main__':
main()
| mit | 1,168,577,169,258,964,700 | 43.756522 | 151 | 0.553283 | false |
paramecio/parameciofm | paramecio/cromosoma/extrafields/slugifyfield.py | 1 | 1059 | #!/usr/bin/env python3
from paramecio.cromosoma.corefields import CharField
from paramecio.citoplasma.slugify import slugify
from paramecio.cromosoma.coreforms import HiddenForm
class SlugifyField(CharField):
def __init__(self, name, size=255, field_related=None, required=False):
super(SlugifyField, self).__init__(name, size, required)
self.name_form=HiddenForm
self.field_related=field_related
def check(self, value):
value=slugify(value)
if value=='':
if self.model!=None and self.field_related!=None:
self.model.post[self.field_related]=self.model.post.get(self.field_related, '')
value=slugify(self.model.post[self.field_related])
if value=='':
self.error=True
self.error_txt='Value is empty'
return ''
return value
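# Minimal usage sketch (illustrative only; assumes a typical slugify()
# implementation and a model whose `post` dict holds the submitted fields):
#
#     field = SlugifyField('slug', field_related='title')
#     field.check('Hola Mundo!')  # -> 'hola-mundo'
#     field.check('')             # falls back to slugify(model.post['title'])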
| gpl-3.0 | 293,104,214,508,820,740 | 27.621622 | 95 | 0.531634 | false |
rs91092/subtitle-downloader | subtitle-downloader.py | 1 | 4389 | #-------------------------------------------------------------------------------
# Name : subtitle downloader
# Purpose : One step subtitle download
#
# Authors : manoj m j, arun shivaram p, Valentin Vetter, niroyb
# Edited by : Valentin Vetter
# Created :
# Copyright : (c) www.manojmj.com
# Licence : GPL v3
#-------------------------------------------------------------------------------
# TODO: use another DB if subs are not found on subDB
import hashlib
import os
import sys
import logging
import requests,time,re,zipfile
from bs4 import BeautifulSoup
PY_VERSION = sys.version_info[0]
if PY_VERSION == 2:
import urllib2
if PY_VERSION == 3:
import urllib.request
def get_hash(file_path):
read_size = 64 * 1024
with open(file_path, 'rb') as f:
data = f.read(read_size)
f.seek(-read_size, os.SEEK_END)
data += f.read(read_size)
return hashlib.md5(data).hexdigest()
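# get_hash() implements the SubDB hash: the md5 of the first and the last
# 64 KiB of the video file, hex-encoded. Quick sketch (path and digest are
# illustrative, not real values):
#
#     get_hash('/movies/example.mp4')
#     # -> 'ffd8d4aa68033dc03d1c8ef373b9028c'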
def sub_downloader(file_path):
# Put the code in a try/except block so that processing continues with the other video files if one fails
try:
# Skip this file if it is not a video
root, extension = os.path.splitext(file_path)
if extension not in [".avi", ".mp4", ".mkv", ".mpg", ".mpeg", ".mov", ".rm", ".vob", ".wmv", ".flv", ".3gp",".3g2"]:
return
if not os.path.exists(root + ".srt"):
headers = {'User-Agent': 'SubDB/1.0 (subtitle-downloader/1.0; http://github.com/manojmj92/subtitle-downloader)'}
url = "http://api.thesubdb.com/?action=download&hash=" + get_hash(file_path) + "&language=en"
if PY_VERSION == 3:
req = urllib.request.Request(url, None, headers)
response = urllib.request.urlopen(req).read()
if PY_VERSION == 2:
req = urllib2.Request(url, '', headers)
response = urllib2.urlopen(req).read()
with open(root + ".srt", "wb") as subtitle:
subtitle.write(response)
logging.info("Subtitle successfully downloaded for " + file_path)
except:
# download subs from Subscene if not found in SubDB
sub_downloader2(file_path)
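# The call above follows the SubDB API (http://thesubdb.com/api/): a GET on
# /?action=download&hash=<md5>&language=en with a mandatory custom User-Agent.
# An equivalent requests-based sketch, shown for reference only:
#
#     headers = {'User-Agent': 'SubDB/1.0 (subtitle-downloader/1.0; '
#                'http://github.com/manojmj92/subtitle-downloader)'}
#     r = requests.get('http://api.thesubdb.com/', headers=headers,
#                      params={'action': 'download',
#                              'hash': get_hash(file_path),
#                              'language': 'en'})
#     # r.content holds the .srt payload on success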
def sub_downloader2(file_path):
try:
root, extension = os.path.splitext(file_path)
if extension not in [".avi", ".mp4", ".mkv", ".mpg", ".mpeg", ".mov", ".rm", ".vob", ".wmv", ".flv", ".3gp",".3g2"]:
return
if os.path.exists(root + ".srt"):
return
j=-1
root2=root
for i in range(0,len(root)):
if(root[i]=="\\"):
j=i
root=root2[j+1:]
root2=root2[:j+1]
r=requests.get("http://subscene.com/subtitles/release?q="+root);
soup=BeautifulSoup(r.content,"lxml")
atags=soup.find_all("a")
href=""
for i in range(0,len(atags)):
spans=atags[i].find_all("span")
if(len(spans)==2 and spans[0].get_text().strip()=="English"):
href=atags[i].get("href").strip()
if(len(href)>0):
r=requests.get("http://subscene.com"+href);
soup=BeautifulSoup(r.content,"lxml")
lin=soup.find_all('a',attrs={'id':'downloadButton'})[0].get("href")
r=requests.get("http://subscene.com"+lin);
soup=BeautifulSoup(r.content,"lxml")
subfile=open(root2+".zip", 'wb')
for chunk in r.iter_content(100000):
subfile.write(chunk)
subfile.close()
time.sleep(1)
zip_file = zipfile.ZipFile(root2 + ".zip")
zip_file.extractall(root2)
zip_file.close()
os.unlink(root2 + ".zip")
except:
#Ignore exception and continue
print("Error in fetching subtitle for " + file_path)
print("Error", sys.exc_info())
logging.error("Error in fetching subtitle for " + file_path + str(sys.exc_info()))
def main():
root, _ = os.path.splitext(sys.argv[0])
logging.basicConfig(filename=root + '.log', level=logging.INFO)
logging.info("Started with params " + str(sys.argv))
if len(sys.argv) == 1:
print("This program requires at least one parameter")
sys.exit(1)
for path in sys.argv:
if os.path.isdir(path):
# Iterate the root directory recursively using os.walk and for each video file present get the subtitle
for dir_path, _, file_names in os.walk(path):
for filename in file_names:
file_path = os.path.join(dir_path, filename)
sub_downloader(file_path)
else:
sub_downloader(path)
if __name__ == '__main__':
main()
| gpl-3.0 | -746,804,804,849,228,800 | 34.395161 | 124 | 0.586466 | false |
TobbeEdgeware/shaka-player | third_party/gjslint/closure_linter-2.3.13/closure_linter/scopeutil.py | 25 | 5563 | #!/usr/bin/env python
#
# Copyright 2012 The Closure Linter Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools to match goog.scope alias statements."""
# Allow non-Google copyright
# pylint: disable=g-bad-file-header
__author__ = ('[email protected] (Nathan Naze)')
import itertools
from closure_linter import ecmametadatapass
from closure_linter import tokenutil
from closure_linter.javascripttokens import JavaScriptTokenType
def IsGoogScopeBlock(context):
"""Whether the given context is a goog.scope block.
This function only checks that the block is a function block inside
a goog.scope() call.
TODO(nnaze): Implement goog.scope checks that verify the call is
in the root context and contains only a single function literal.
Args:
context: An EcmaContext of type block.
Returns:
Whether the context is a goog.scope block.
"""
if context.type != ecmametadatapass.EcmaContext.BLOCK:
return False
if not _IsFunctionLiteralBlock(context):
return False
# Check that this function is contained by a group
# of form "goog.scope(...)".
parent = context.parent
if parent and parent.type is ecmametadatapass.EcmaContext.GROUP:
last_code_token = parent.start_token.metadata.last_code
if (last_code_token and
last_code_token.type is JavaScriptTokenType.IDENTIFIER and
last_code_token.string == 'goog.scope'):
return True
return False
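# For reference, the JavaScript pattern this function detects looks like
# (identifier names illustrative):
#
#     goog.scope(function() {
#       var MyClass = proj.longNamespace.MyClass;
#     });  // <- the function literal's block is a goog.scope block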
def _IsFunctionLiteralBlock(block_context):
"""Check if a context is a function literal block (without parameters).
Example function literal block: 'function() {}'
Args:
block_context: An EcmaContext of type block.
Returns:
Whether this context is a function literal block.
"""
previous_code_tokens_iter = itertools.ifilter(
lambda token: token.type not in JavaScriptTokenType.NON_CODE_TYPES,
reversed(block_context.start_token))
# Ignore the current token
next(previous_code_tokens_iter, None)
# Grab the previous three tokens and put them in correct order.
previous_code_tokens = list(itertools.islice(previous_code_tokens_iter, 3))
previous_code_tokens.reverse()
# There aren't three previous tokens.
if len(previous_code_tokens) != 3:
return False
# Check that the previous three code tokens are "function ()"
previous_code_token_types = [token.type for token in previous_code_tokens]
if (previous_code_token_types == [
JavaScriptTokenType.FUNCTION_DECLARATION,
JavaScriptTokenType.START_PARAMETERS,
JavaScriptTokenType.END_PARAMETERS]):
return True
return False
def IsInClosurizedNamespace(symbol, closurized_namespaces):
"""Match a goog.scope alias.
Args:
symbol: An identifier like 'goog.events.Event'.
closurized_namespaces: Iterable of valid Closurized namespaces (strings).
Returns:
True if symbol is an identifier in a Closurized namespace, otherwise False.
"""
for ns in closurized_namespaces:
if symbol.startswith(ns + '.'):
return True
return False
def MatchAlias(context):
"""Match an alias statement (some identifier assigned to a variable).
Example alias: var MyClass = proj.longNamespace.MyClass.
Args:
context: An EcmaContext of type EcmaContext.STATEMENT.
Returns:
If a valid alias, returns a tuple of alias and symbol, otherwise None.
"""
if context.type != ecmametadatapass.EcmaContext.STATEMENT:
return
# Get the tokens in this statement.
if context.start_token and context.end_token:
statement_tokens = tokenutil.GetTokenRange(context.start_token,
context.end_token)
else:
return
# And now just those tokens that are actually code.
is_non_code_type = lambda t: t.type not in JavaScriptTokenType.NON_CODE_TYPES
code_tokens = filter(is_non_code_type, statement_tokens)
# This section identifies statements of the alias form "var alias = symbol".
# Pop off the semicolon if present.
if code_tokens and code_tokens[-1].IsType(JavaScriptTokenType.SEMICOLON):
code_tokens.pop()
if not (len(code_tokens) == 4 and
code_tokens[0].IsKeyword('var') and
(code_tokens[0].metadata.context.type ==
ecmametadatapass.EcmaContext.VAR)):
return
# Verify the only code tokens in this statement are part of the var
# declaration.
var_context = code_tokens[0].metadata.context
for token in code_tokens:
if token.metadata.context is not var_context:
return
# Verify that this is of the form "var lvalue = identifier;".
if not(code_tokens[0].IsKeyword('var') and
code_tokens[1].IsType(JavaScriptTokenType.SIMPLE_LVALUE) and
code_tokens[2].IsOperator('=') and
code_tokens[3].IsType(JavaScriptTokenType.IDENTIFIER)):
return
alias, symbol = code_tokens[1], code_tokens[3]
# Mark both tokens as an alias definition to avoid counting them as usages.
alias.metadata.is_alias_definition = True
symbol.metadata.is_alias_definition = True
return alias.string, symbol.string
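# Sketch of the expected result (context construction elided; given a
# statement context wrapping the JavaScript below):
#
#     var MyClass = proj.longNamespace.MyClass;
#
# MatchAlias(context) would return ('MyClass', 'proj.longNamespace.MyClass').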
| apache-2.0 | -518,120,842,690,524,500 | 29.905556 | 79 | 0.721373 | false |
bmachiel/python-nport | smith/create_smith_plot.py | 1 | 1537 | """
Contains a convenience function to create a ready-made Smith plot
"""
# Major library imports
from numpy import array, ndarray, transpose, real, imag
from enthought.chaco.array_data_source import ArrayDataSource
from enthought.chaco.data_range_1d import DataRange1D
# Local relative imports
from .smith_mapper import SmithMapper
from .smith_line_renderer import SmithLineRenderer
def create_smith_plot(data, orientation='h', color='black', width=1.0,
dash="solid", grid="dot", value_mapper_class=SmithMapper):
if (type(data) != ndarray) and (len(data) == 2):
data = transpose(array(data))
freqs, values = transpose(data)
index_data = real(values)
value_data = imag(values)
index = ArrayDataSource(index_data, sort_order='ascending')
# Typically the value data is unsorted
value = ArrayDataSource(value_data)
index_range = DataRange1D()
index_range.add(index)
index_mapper = SmithMapper(range=index_range)
value_range = DataRange1D()
value_range.add(value)
value_mapper = value_mapper_class(range=value_range)
plot = SmithLineRenderer(index=index, value=value,
index_mapper = index_mapper,
value_mapper = value_mapper,
orientation = orientation,
color = color,
line_width = width,
line_style = dash,
grid_style = grid)
return plot
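# Minimal usage sketch (variable names illustrative; `data` may be a sequence
# of (frequency, complex reflection coefficient) pairs, or the two-row
# equivalent handled above):
#
#     from numpy import exp, linspace
#     freqs = linspace(1e6, 1e9, 101)
#     gammas = 0.5 * exp(1j * freqs / 1e8)
#     plot = create_smith_plot(zip(freqs, gammas), color='blue')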
| gpl-3.0 | 7,017,037,659,748,273,000 | 33.155556 | 80 | 0.618738 | false |
jotacor/tradunio-web | app/tradunio/comunio_service.py | 1 | 5174 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup as bs
from datetime import date, timedelta, datetime
import re
from suds.client import Client
from ..models import Player
class Comunio:
def __init__(self):
self.client = Client(url='http://www.comunio.es/soapservice.php?wsdl')
self.today = date.today()
def get_player_price(self, playerid, from_date=None):
if from_date:
prices = list()
loop_date = datetime.strptime(from_date, '%Y-%m-%d').date()
while loop_date <= self.today:
price = self.client.service.getquote(playerid=playerid, date=loop_date)
prices.append([loop_date, price])
loop_date = loop_date + timedelta(days=1)
else:
price = self.client.service.getquote(playerid=playerid, date=self.today)
prices = [self.today, price]
return prices
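# Sketch of the two call forms (assuming c = Comunio(); values illustrative):
#
#     c.get_player_price(3)                # -> [today, price]
#     c.get_player_price(3, '2016-05-01')  # -> [[date, price], ...] per day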
def get_clubs(self):
clubs_comunio = self.client.service.getclubs()
clubs = list()
for club in clubs_comunio:
club_id = club.id[0]
club_name = club.name[0].encode('utf-8')
clubs.append([club_id, club_name])
return clubs
def get_playersbyclubid(self, club_id=None):
players_comunio, players_list = list(), list()
if not club_id:
clubs = self.get_clubs()
for club in clubs:
club_id, clubname = club
players_comunio.append(self.client.service.getplayersbyclubid(club_id))
else:
players_comunio.append(self.client.service.getplayersbyclubid(club_id))
for club_players in players_comunio:
for player in club_players:
players_list.append([
player.id[0],
player.name[0].encode('utf-8').strip(),
player.points[0],
player.clubid[0],
player.quote[0],
player.status[0].encode('utf-8'),
player.status_info[0].encode('utf-8') if player.status_info else None,
player.position[0].encode('utf-8'),
player.rankedgamesnumber[0]
])
return players_list
def get_market(self, community_id=None, user_id=None):
"""
Get the market from the community_id or the user_id
:param community_id: Comunio community id.
:param user_id: Comunio user id, used to resolve the community when community_id is not given.
:return: [player_id, playername, points, club_id, market_price, min_price, status, injured, position, placed_date, owner_id]
"""
if not community_id:
community_id = self.client.service.getcommunityid(user_id)
market_comunio = self.client.service.getcommunitymarket(community_id)
market = list()
for listed in market_comunio:
market.append([
listed.id[0],
listed.name[0].encode('utf-8').strip(),
listed.points[0],
listed.clubid[0],
listed.quote[0],
listed.recommendedprice[0],
listed.status[0].encode('utf-8'),
listed.status_info[0].encode('utf-8') if listed.status_info else None,
listed.position[0].encode('utf-8'),
listed.placed[0],
listed.ownerid[0],
])
return market
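# A single market row follows the docstring above; roughly (values
# illustrative, not real data):
#
#     [112, 'Messi', 9, 5, 12500000, 11000000, 'fit', None,
#      'delantero', <placed date>, 15797714]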
def get_transactions(self, community_id=None, user_id=None):
if not community_id:
community_id = self.client.service.getcommunityid(user_id)
news_comunio = self.client.service.getcomputernews(community_id, 30, 30)
transactions = list()
pattern = re.compile(
r'(?:(?:\\n)?([(\S+ )]+?)(?: cambia por )([0-9\.,]*?)(?: .*? de )(.+?) a (.+?)\.)', re.UNICODE)
for published in news_comunio:
if published.subject[0] == 'Fichajes':
message = bs(published.message[0].encode('utf-8'), "html.parser")
player_id = int(re.findall('\d+', message.a['href'])[0])
trans = re.findall(pattern, message.text)[0]
playername, value, fr, to = trans
transactions.append([
datetime.strptime(published.date[0][0:10], '%Y-%m-%d').date(),
player_id, playername, int(value.replace('.', '')), fr, to
])
return transactions
def get_gamedays(self):
gamedays_comunio = self.client.service.getgamedays()
gamedays_list = list()
for gamedays in gamedays_comunio:
gameday_id = int(gamedays[0][0].value[0])
number = int(gamedays[0][1].value[0])
gamedate = datetime.strptime(gamedays[0][3].value[0][0:10], '%Y-%m-%d').date()
shifted = bool(gamedays[0][4].value[0])
gamedays_list.append([number, gameday_id, gamedate, shifted])
return gamedays_list
# c = Comunio()
# print c.get_player_price(3, '2016-05-01')
# print c.get_clubs()
# print c.get_market(user_id=15797714)
# a=c.get_transactions(user_id=15797714)
# print c.get_playersbyclubid(15)
# print c.get_gamedays()
# pass
| mit | 680,974,589,956,552,600 | 36.766423 | 132 | 0.553923 | false |
manishpatell/erpcustomizationssaiimpex123qwe | addons/delivery/__openerp__.py | 113 | 1902 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Delivery Costs',
'version': '1.0',
'category': 'Sales Management',
'description': """
Allows you to add delivery methods in sale orders and picking.
==============================================================
You can define your own carrier and delivery grids for prices. When creating
invoices from picking, OpenERP is able to add and compute the shipping line.
""",
'author': 'OpenERP SA',
'depends': ['sale_stock'],
'data': [
'security/ir.model.access.csv',
'delivery_view.xml',
'partner_view.xml',
'delivery_data.xml',
'views/report_shipping.xml',
],
'demo': ['delivery_demo.xml'],
'test': ['test/delivery_cost.yml'],
'installable': True,
'auto_install': False,
'images': ['images/1_delivery_method.jpeg','images/2_delivery_pricelist.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,920,677,306,770,915,000 | 37.816327 | 83 | 0.595163 | false |
bac/horizon | openstack_dashboard/usage/quotas.py | 2 | 17368 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import itertools
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import network
from openstack_dashboard.api import neutron
from openstack_dashboard.api import nova
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
NOVA_QUOTA_FIELDS = ("metadata_items",
"cores",
"instances",
"injected_files",
"injected_file_content_bytes",
"ram",
"floating_ips",
"fixed_ips",
"security_groups",
"security_group_rules",)
MISSING_QUOTA_FIELDS = ("key_pairs",
"injected_file_path_bytes",)
CINDER_QUOTA_FIELDS = ("volumes",
"snapshots",
"gigabytes",)
NEUTRON_QUOTA_FIELDS = ("network",
"subnet",
"port",
"router",
"floatingip",
"security_group",
"security_group_rule",
)
QUOTA_FIELDS = NOVA_QUOTA_FIELDS + CINDER_QUOTA_FIELDS + NEUTRON_QUOTA_FIELDS
QUOTA_NAMES = {
"metadata_items": _('Metadata Items'),
"cores": _('VCPUs'),
"instances": _('Instances'),
"injected_files": _('Injected Files'),
"injected_file_content_bytes": _('Injected File Content Bytes'),
"ram": _('RAM (MB)'),
"floating_ips": _('Floating IPs'),
"fixed_ips": _('Fixed IPs'),
"security_groups": _('Security Groups'),
"security_group_rules": _('Security Group Rules'),
"key_pairs": _('Key Pairs'),
"injected_file_path_bytes": _('Injected File Path Bytes'),
"volumes": _('Volumes'),
"snapshots": _('Volume Snapshots'),
"gigabytes": _('Total Size of Volumes and Snapshots (GB)'),
"network": _("Networks"),
"subnet": _("Subnets"),
"port": _("Ports"),
"router": _("Routers"),
"floatingip": _('Floating IPs'),
"security_group": _("Security Groups"),
"security_group_rule": _("Security Group Rules")
}
class QuotaUsage(dict):
"""Tracks quota limit, used, and available for a given set of quotas."""
def __init__(self):
self.usages = defaultdict(dict)
def __contains__(self, key):
return key in self.usages
def __getitem__(self, key):
return self.usages[key]
def __setitem__(self, key, value):
raise NotImplementedError("Directly setting QuotaUsage values is not "
"supported. Please use the add_quota and "
"tally methods.")
def __repr__(self):
return repr(dict(self.usages))
def get(self, key, default=None):
return self.usages.get(key, default)
def add_quota(self, quota):
"""Adds an internal tracking reference for the given quota."""
if quota.limit is None or quota.limit == -1:
# Handle "unlimited" quotas.
self.usages[quota.name]['quota'] = float("inf")
self.usages[quota.name]['available'] = float("inf")
else:
self.usages[quota.name]['quota'] = int(quota.limit)
def tally(self, name, value):
"""Adds to the "used" metric for the given quota."""
value = value or 0 # Protection against None.
# Start at 0 if this is the first value.
if 'used' not in self.usages[name]:
self.usages[name]['used'] = 0
# Increment our usage and update the "available" metric.
self.usages[name]['used'] += int(value) # Fail if can't coerce to int.
self.update_available(name)
def update_available(self, name):
"""Updates the "available" metric for the given quota."""
quota = self.usages.get(name, {}).get('quota', float('inf'))
available = quota - self.usages[name]['used']
if available < 0:
available = 0
self.usages[name]['available'] = available
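# Minimal usage sketch (any object exposing `name` and `limit` attributes
# works as the quota argument; values illustrative):
#
#     usage = QuotaUsage()
#     usage.add_quota(quota)       # e.g. a base.Quota('instances', 10)
#     usage.tally('instances', 4)
#     usage['instances']           # -> {'quota': 10, 'used': 4, 'available': 6}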
def _get_quota_data(request, tenant_mode=True, disabled_quotas=None,
tenant_id=None):
quotasets = []
if not tenant_id:
tenant_id = request.user.tenant_id
if disabled_quotas is None:
disabled_quotas = get_disabled_quotas(request)
qs = base.QuotaSet()
if 'instances' not in disabled_quotas:
if tenant_mode:
quotasets.append(nova.tenant_quota_get(request, tenant_id))
else:
quotasets.append(nova.default_quota_get(request, tenant_id))
if 'volumes' not in disabled_quotas:
try:
if tenant_mode:
quotasets.append(cinder.tenant_quota_get(request, tenant_id))
else:
quotasets.append(cinder.default_quota_get(request, tenant_id))
except cinder.cinder_exception.ClientException:
disabled_quotas.update(CINDER_QUOTA_FIELDS)
msg = _("Unable to retrieve volume limit information.")
exceptions.handle(request, msg)
for quota in itertools.chain(*quotasets):
if quota.name not in disabled_quotas:
qs[quota.name] = quota.limit
return qs
def get_default_quota_data(request, disabled_quotas=None, tenant_id=None):
return _get_quota_data(request,
tenant_mode=False,
disabled_quotas=disabled_quotas,
tenant_id=tenant_id)
def get_tenant_quota_data(request, disabled_quotas=None, tenant_id=None):
qs = _get_quota_data(request,
tenant_mode=True,
disabled_quotas=disabled_quotas,
tenant_id=tenant_id)
# TODO(jpichon): There is no API to get the default system quotas
# in Neutron (cf. LP#1204956), so for now handle tenant quotas here.
# This should be handled in _get_quota_data() eventually.
if not disabled_quotas:
return qs
# Check if neutron is enabled by looking for network and router
if 'network' not in disabled_quotas and 'router' not in disabled_quotas:
tenant_id = tenant_id or request.user.tenant_id
neutron_quotas = neutron.tenant_quota_get(request, tenant_id)
if 'floating_ips' in disabled_quotas:
# Neutron with quota extension disabled
if 'floatingip' in disabled_quotas:
qs.add(base.QuotaSet({'floating_ips': -1}))
# Neutron with quota extension enabled
else:
# Rename floatingip to floating_ips since that's how it's
# expected in some places (e.g. Security & Access' Floating IPs)
fips_quota = neutron_quotas.get('floatingip').limit
qs.add(base.QuotaSet({'floating_ips': fips_quota}))
if 'security_groups' in disabled_quotas:
if 'security_group' in disabled_quotas:
qs.add(base.QuotaSet({'security_groups': -1}))
# Neutron with quota extension enabled
else:
# Rename security_group to security_groups since that's how it's
# expected in some places (e.g. Security & Access' Security Groups)
sec_quota = neutron_quotas.get('security_group').limit
qs.add(base.QuotaSet({'security_groups': sec_quota}))
if 'network' in disabled_quotas:
for item in qs.items:
if item.name == 'networks':
qs.items.remove(item)
break
else:
net_quota = neutron_quotas.get('network').limit
qs.add(base.QuotaSet({'networks': net_quota}))
if 'subnet' in disabled_quotas:
for item in qs.items:
if item.name == 'subnets':
qs.items.remove(item)
break
else:
net_quota = neutron_quotas.get('subnet').limit
qs.add(base.QuotaSet({'subnets': net_quota}))
if 'router' in disabled_quotas:
for item in qs.items:
if item.name == 'routers':
qs.items.remove(item)
break
else:
router_quota = neutron_quotas.get('router').limit
qs.add(base.QuotaSet({'routers': router_quota}))
return qs
def get_disabled_quotas(request):
disabled_quotas = set([])
# Cinder
if not cinder.is_volume_service_enabled(request):
disabled_quotas.update(CINDER_QUOTA_FIELDS)
# Neutron
if not base.is_service_enabled(request, 'network'):
disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
else:
# Remove the nova network quotas
disabled_quotas.update(['floating_ips', 'fixed_ips'])
if neutron.is_extension_supported(request, 'security-group'):
# If Neutron security group is supported, disable Nova quotas
disabled_quotas.update(['security_groups', 'security_group_rules'])
else:
# If Nova security group is used, disable Neutron quotas
disabled_quotas.update(['security_group', 'security_group_rule'])
try:
if not neutron.is_quotas_extension_supported(request):
disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
except Exception:
LOG.exception("There was an error checking if the Neutron "
"quotas extension is enabled.")
# Nova
if not (base.is_service_enabled(request, 'compute') and
nova.can_set_quotas()):
disabled_quotas.update(NOVA_QUOTA_FIELDS)
# The 'missing' quota fields are all nova (this is hardcoded in
# dashboards.admin.defaults.workflows)
disabled_quotas.update(MISSING_QUOTA_FIELDS)
# There appear to be no glance quota fields currently
return disabled_quotas


def _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id):
    # Unlike the other services it can be the case that nova is enabled but
    # doesn't support quotas, in which case we still want to get usage info,
    # so don't rely on '"instances" in disabled_quotas' as elsewhere
    if not base.is_service_enabled(request, 'compute'):
        return

    if tenant_id:
        # determine if the user has permission to view across projects
        # there are cases where an administrator wants to check the quotas
        # on a project they are not scoped to
        all_tenants = policy.check((("compute", "compute:get_all_tenants"),),
                                   request)
        instances, has_more = nova.server_list(
            request, search_opts={'tenant_id': tenant_id},
            all_tenants=all_tenants)
    else:
        instances, has_more = nova.server_list(request)

    # Fetch deleted flavors if necessary.
    flavors = dict([(f.id, f) for f in nova.flavor_list(request)])
    missing_flavors = [instance.flavor['id'] for instance in instances
                       if instance.flavor['id'] not in flavors]
    for missing in missing_flavors:
        if missing not in flavors:
            try:
                flavors[missing] = nova.flavor_get(request, missing)
            except Exception:
                flavors[missing] = {}
                exceptions.handle(request, ignore=True)

    usages.tally('instances', len(instances))

    # Sum our usage based on the flavors of the instances.
    for flavor in [flavors[instance.flavor['id']] for instance in instances]:
        usages.tally('cores', getattr(flavor, 'vcpus', None))
        usages.tally('ram', getattr(flavor, 'ram', None))

    # Initialize the tally if no instances have been launched yet
    if len(instances) == 0:
        usages.tally('cores', 0)
        usages.tally('ram', 0)
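
# --- Illustrative sketch (not part of the original module) ----------------
# The compute tally above boils down to: count instances, then sum vcpus
# and ram per instance by looking its flavor up in a local cache. The same
# aggregation as a runnable model, with plain dicts standing in for the
# nova objects:

def _tally_compute_demo(instances, flavors):
    usage = {'instances': len(instances), 'cores': 0, 'ram': 0}
    for inst in instances:
        flavor = flavors.get(inst['flavor_id'], {})
        usage['cores'] += flavor.get('vcpus', 0)
        usage['ram'] += flavor.get('ram', 0)
    return usage

assert _tally_compute_demo(
    [{'flavor_id': 'f1'}, {'flavor_id': 'f1'}],
    {'f1': {'vcpus': 2, 'ram': 512}},
) == {'instances': 2, 'cores': 4, 'ram': 1024}
# ---------------------------------------------------------------------------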


def _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id):
    floating_ips = []
    try:
        if network.floating_ip_supported(request):
            floating_ips = network.tenant_floating_ip_list(request)
    except Exception:
        pass
    usages.tally('floating_ips', len(floating_ips))

    if 'security_group' not in disabled_quotas:
        security_groups = network.security_group_list(request)
        usages.tally('security_groups', len(security_groups))

    if 'network' not in disabled_quotas:
        networks = neutron.network_list(request, shared=False)
        if tenant_id:
            networks = [net for net in networks if net.tenant_id == tenant_id]
        usages.tally('networks', len(networks))
        # get shared networks
        shared_networks = neutron.network_list(request, shared=True)
        if tenant_id:
            shared_networks = [net for net in shared_networks
                               if net.tenant_id == tenant_id]
        usages.tally('networks', len(shared_networks))

    if 'subnet' not in disabled_quotas:
        subnets = neutron.subnet_list(request, shared=False)
        if tenant_id:
            subnets = [sub for sub in subnets if sub.tenant_id == tenant_id]
        # get shared subnets
        shared_subnets = neutron.subnet_list(request, shared=True)
        if tenant_id:
            shared_subnets = [subnet for subnet in shared_subnets
                              if subnet.tenant_id == tenant_id]
        usages.tally('subnets', len(subnets) + len(shared_subnets))

    if 'router' not in disabled_quotas:
        routers = neutron.router_list(request)
        if tenant_id:
            routers = [rou for rou in routers if rou.tenant_id == tenant_id]
        usages.tally('routers', len(routers))


def _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id):
    if 'volumes' not in disabled_quotas:
        try:
            if tenant_id:
                opts = {'all_tenants': 1, 'project_id': tenant_id}
                volumes = cinder.volume_list(request, opts)
                snapshots = cinder.volume_snapshot_list(request, opts)
            else:
                volumes = cinder.volume_list(request)
                snapshots = cinder.volume_snapshot_list(request)
            usages.tally('gigabytes', sum(int(v.size) for v in volumes))
            usages.tally('volumes', len(volumes))
            usages.tally('snapshots', len(snapshots))
        except cinder.cinder_exception.ClientException:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)


@memoized
def tenant_quota_usages(request, tenant_id=None):
    """Get our quotas and construct our usage object.

    If no tenant_id is provided, the request.user.project_id is used.
    """
    if not tenant_id:
        tenant_id = request.user.project_id

    disabled_quotas = get_disabled_quotas(request)
    usages = QuotaUsage()

    for quota in get_tenant_quota_data(request,
                                       disabled_quotas=disabled_quotas,
                                       tenant_id=tenant_id):
        usages.add_quota(quota)

    # Get our usages.
    _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id)

    return usages
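
# --- Illustrative sketch (not part of the original module) ----------------
# Minimal stand-in for the QuotaUsage object returned above, to show the
# {'quota': ..., 'used': ...} shape that dashboard templates index into.
# The real class lives elsewhere in this module and takes quota objects,
# not (name, limit) pairs; the method signatures here are simplifications.

class _QuotaUsageModel(object):
    def __init__(self):
        self.usages = {}

    def add_quota(self, name, limit):
        self.usages.setdefault(name, {'used': 0})['quota'] = limit

    def tally(self, name, value):
        self.usages.setdefault(name, {'used': 0})['used'] += value

    def __getitem__(self, key):
        return self.usages[key]

_u = _QuotaUsageModel()
_u.add_quota('instances', 10)
_u.tally('instances', 3)
assert _u['instances'] == {'used': 3, 'quota': 10}
# ---------------------------------------------------------------------------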


def tenant_limit_usages(request):
    # TODO(licostan): This method shall be removed from Quota module.
    # ProjectUsage/BaseUsage maybe used instead on volume/image dashboards.
    limits = {}

    try:
        if base.is_service_enabled(request, 'compute'):
            limits.update(nova.tenant_absolute_limits(request, reserved=True))
    except Exception:
        msg = _("Unable to retrieve compute limit information.")
        exceptions.handle(request, msg)

    if cinder.is_volume_service_enabled(request):
        try:
            limits.update(cinder.tenant_absolute_limits(request))
            volumes = cinder.volume_list(request)
            snapshots = cinder.volume_snapshot_list(request)
            # gigabytesUsed should be a total of volumes and snapshots
            vol_size = sum(getattr(volume, 'size', 0)
                           for volume in volumes)
            snap_size = sum(getattr(snap, 'size', 0)
                            for snap in snapshots)
            limits['gigabytesUsed'] = vol_size + snap_size
            limits['volumesUsed'] = len(volumes)
            limits['snapshotsUsed'] = len(snapshots)
        except cinder.cinder_exception.ClientException:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)

    return limits


def enabled_quotas(request):
    """Returns the list of quotas available minus those that are disabled"""
    return set(QUOTA_FIELDS) - get_disabled_quotas(request)
| apache-2.0 | -654,293,287,180,952,300 | 37.767857 | 79 | 0.607381 | false |
MiniLight/DeepCL | thirdparty/cogapp/cogapp/test_cogapp.py | 10 | 66477 | """ Test cogapp.
http://nedbatchelder.com/code/cog
Copyright 2004-2015, Ned Batchelder.
"""
from __future__ import absolute_import
import os, os.path, random, re, shutil, stat, sys, tempfile
# Use unittest2 if it's available, otherwise unittest. This gives us
# back-ported features for 2.6.
try:
import unittest2 as unittest
except ImportError:
import unittest
from .backward import StringIO, to_bytes, b
from .cogapp import Cog, CogOptions, CogGenerator
from .cogapp import CogError, CogUsageError, CogGeneratedError
from .cogapp import usage, __version__
from .whiteutils import reindentBlock
from .makefiles import *
TestCase = unittest.TestCase
class CogTestsInMemory(TestCase):
""" Test cases for cogapp.Cog()
"""
def testNoCog(self):
strings = [
'',
' ',
' \t \t \tx',
'hello',
'the cat\nin the\nhat.',
'Horton\n\tHears A\n\t\tWho'
]
for s in strings:
self.assertEqual(Cog().processString(s), s)
def testSimple(self):
infile = """\
Some text.
//[[[cog
import cog
cog.outl("This is line one\\n")
cog.outl("This is line two")
//]]]
gobbledegook.
//[[[end]]]
epilogue.
"""
outfile = """\
Some text.
//[[[cog
import cog
cog.outl("This is line one\\n")
cog.outl("This is line two")
//]]]
This is line one
This is line two
//[[[end]]]
epilogue.
"""
self.assertEqual(Cog().processString(infile), outfile)
def testEmptyCog(self):
# The cog clause can be totally empty. Not sure why you'd want it,
# but it works.
infile = """\
hello
//[[[cog
//]]]
//[[[end]]]
goodbye
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testMultipleCogs(self):
# One file can have many cog chunks, even abutting each other.
infile = """\
//[[[cog
cog.out("chunk1")
//]]]
chunk1
//[[[end]]]
//[[[cog
cog.out("chunk2")
//]]]
chunk2
//[[[end]]]
between chunks
//[[[cog
cog.out("chunk3")
//]]]
chunk3
//[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testTrimBlankLines(self):
infile = """\
//[[[cog
cog.out("This is line one\\n", trimblanklines=True)
cog.out('''
This is line two
''', dedent=True, trimblanklines=True)
cog.outl("This is line three", trimblanklines=True)
//]]]
This is line one
This is line two
This is line three
//[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testTrimEmptyBlankLines(self):
infile = """\
//[[[cog
cog.out("This is line one\\n", trimblanklines=True)
cog.out('''
This is line two
''', dedent=True, trimblanklines=True)
cog.out('', dedent=True, trimblanklines=True)
cog.outl("This is line three", trimblanklines=True)
//]]]
This is line one
This is line two
This is line three
//[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def test22EndOfLine(self):
# In Python 2.2, this cog file was not parsing because the
# last line is indented but didn't end with a newline.
infile = """\
//[[[cog
import cog
for i in range(3):
cog.out("%d\\n" % i)
//]]]
0
1
2
//[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testIndentedCode(self):
infile = """\
first line
[[[cog
import cog
for i in range(3):
cog.out("xx%d\\n" % i)
]]]
xx0
xx1
xx2
[[[end]]]
last line
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testPrefixedCode(self):
infile = """\
--[[[cog
--import cog
--for i in range(3):
-- cog.out("xx%d\\n" % i)
--]]]
xx0
xx1
xx2
--[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testPrefixedIndentedCode(self):
infile = """\
prologue
--[[[cog
-- import cog
-- for i in range(3):
-- cog.out("xy%d\\n" % i)
--]]]
xy0
xy1
xy2
--[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testBogusPrefixMatch(self):
infile = """\
prologue
#[[[cog
import cog
# This comment should not be clobbered by removing the pound sign.
for i in range(3):
cog.out("xy%d\\n" % i)
#]]]
xy0
xy1
xy2
#[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testNoFinalNewline(self):
# If the cog'ed output has no final newline,
# it shouldn't eat up the cog terminator.
infile = """\
prologue
[[[cog
import cog
for i in range(3):
cog.out("%d" % i)
]]]
012
[[[end]]]
epilogue
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testNoOutputAtAll(self):
# If there is absolutely no cog output, that's ok.
infile = """\
prologue
[[[cog
i = 1
]]]
[[[end]]]
epilogue
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testPurelyBlankLine(self):
# If there is a blank line in the cog code with no whitespace
# prefix, that should be OK.
infile = """\
prologue
[[[cog
import sys
cog.out("Hello")
$
cog.out("There")
]]]
HelloThere
[[[end]]]
epilogue
"""
infile = reindentBlock(infile.replace('$', ''))
self.assertEqual(Cog().processString(infile), infile)
def testEmptyOutl(self):
# Alexander Belchenko suggested the string argument to outl should
# be optional. Does it work?
infile = """\
prologue
[[[cog
cog.outl("x")
cog.outl()
cog.outl("y")
cog.outl(trimblanklines=True)
cog.outl("z")
]]]
x
y
z
[[[end]]]
epilogue
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testFirstLineNum(self):
infile = """\
fooey
[[[cog
cog.outl("started at line number %d" % cog.firstLineNum)
]]]
started at line number 2
[[[end]]]
blah blah
[[[cog
cog.outl("and again at line %d" % cog.firstLineNum)
]]]
and again at line 8
[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
def testCompactOneLineCode(self):
infile = """\
first line
hey: [[[cog cog.outl("hello %d" % (3*3*3*3)) ]]] looky!
get rid of this!
[[[end]]]
last line
"""
outfile = """\
first line
hey: [[[cog cog.outl("hello %d" % (3*3*3*3)) ]]] looky!
hello 81
[[[end]]]
last line
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), reindentBlock(outfile))
def testInsideOutCompact(self):
infile = """\
first line
hey?: ]]] what is this? [[[cog strange!
get rid of this!
[[[end]]]
last line
"""
with self.assertRaisesRegexp(CogError, r"infile.txt\(2\): Cog code markers inverted"):
Cog().processString(reindentBlock(infile), "infile.txt")
def testSharingGlobals(self):
infile = """\
first line
hey: [[[cog s="hey there" ]]] looky!
[[[end]]]
more literal junk.
[[[cog cog.outl(s) ]]]
[[[end]]]
last line
"""
outfile = """\
first line
hey: [[[cog s="hey there" ]]] looky!
[[[end]]]
more literal junk.
[[[cog cog.outl(s) ]]]
hey there
[[[end]]]
last line
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), reindentBlock(outfile))
def testAssertInCogCode(self):
# Check that we can test assertions in cog code in the test framework.
infile = """\
[[[cog
assert 1 == 2, "Oops"
]]]
[[[end]]]
"""
infile = reindentBlock(infile)
with self.assertRaisesRegexp(AssertionError, "Oops"):
Cog().processString(infile)
def testCogPrevious(self):
# Check that we can access the previous run's output.
infile = """\
[[[cog
assert cog.previous == "Hello there!\\n", "WTF??"
cog.out(cog.previous)
cog.outl("Ran again!")
]]]
Hello there!
[[[end]]]
"""
outfile = """\
[[[cog
assert cog.previous == "Hello there!\\n", "WTF??"
cog.out(cog.previous)
cog.outl("Ran again!")
]]]
Hello there!
Ran again!
[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), reindentBlock(outfile))
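
# --- Illustrative sketch (not part of the original test suite) ------------
# The whole cog contract, in miniature: text between [[[cog and ]]] runs as
# Python, and whatever it emits through cog.out()/cog.outl() replaces the
# lines between ]]] and [[[end]]]. A standalone round trip with the same
# Cog class the tests above exercise:

def _demo_roundtrip():
    src = ("//[[[cog\n"
           "cog.outl('generated')\n"
           "//]]]\n"
           "stale output\n"
           "//[[[end]]]\n")
    result = Cog().processString(src)
    assert "generated\n" in result
    assert "stale output" not in result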
class CogOptionsTests(TestCase):
""" Test the CogOptions class.
"""
def testEquality(self):
o = CogOptions()
p = CogOptions()
self.assertEqual(o, p)
o.parseArgs(['-r'])
self.assertNotEqual(o, p)
p.parseArgs(['-r'])
self.assertEqual(o, p)
def testCloning(self):
o = CogOptions()
o.parseArgs(['-I', 'fooey', '-I', 'booey', '-s', ' /*x*/'])
p = o.clone()
self.assertEqual(o, p)
p.parseArgs(['-I', 'huey', '-D', 'foo=quux'])
self.assertNotEqual(o, p)
q = CogOptions()
q.parseArgs(['-I', 'fooey', '-I', 'booey', '-s', ' /*x*/', '-I', 'huey', '-D', 'foo=quux'])
self.assertEqual(p, q)
def testCombiningFlags(self):
# Single-character flags can be combined.
o = CogOptions()
o.parseArgs(['-e', '-r', '-z'])
p = CogOptions()
p.parseArgs(['-erz'])
self.assertEqual(o, p)
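
# --- Illustrative sketch (not part of the original test suite) ------------
# Why clone()/equality matter: per-file lines in an @-file clone the global
# options and then layer their own flags on top (see the @-file tests later
# in this file). Standalone illustration:

def _demo_per_file_options():
    base_opts = CogOptions()
    base_opts.parseArgs(['-z'])
    per_file = base_opts.clone()
    per_file.parseArgs(['-s', ' //'])
    assert base_opts == base_opts.clone()  # cloning alone changes nothing
    assert base_opts != per_file           # the per-file copy diverged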
class FileStructureTests(TestCase):
""" Test cases to check that we're properly strict about the structure
of files.
"""
def isBad(self, infile, msg=None):
infile = reindentBlock(infile)
with self.assertRaisesRegexp(CogError, re.escape(msg)):
Cog().processString(infile, 'infile.txt')
def testBeginNoEnd(self):
infile = """\
Fooey
#[[[cog
cog.outl('hello')
"""
self.isBad(infile, "infile.txt(2): Cog block begun but never ended.")
def testNoEoo(self):
infile = """\
Fooey
#[[[cog
cog.outl('hello')
#]]]
"""
self.isBad(infile, "infile.txt(4): Missing '[[[end]]]' before end of file.")
infile2 = """\
Fooey
#[[[cog
cog.outl('hello')
#]]]
#[[[cog
cog.outl('goodbye')
#]]]
"""
self.isBad(infile2, "infile.txt(5): Unexpected '[[[cog'")
def testStartWithEnd(self):
infile = """\
#]]]
"""
self.isBad(infile, "infile.txt(1): Unexpected ']]]'")
infile2 = """\
#[[[cog
cog.outl('hello')
#]]]
#[[[end]]]
#]]]
"""
self.isBad(infile2, "infile.txt(5): Unexpected ']]]'")
def testStartWithEoo(self):
infile = """\
#[[[end]]]
"""
self.isBad(infile, "infile.txt(1): Unexpected '[[[end]]]'")
infile2 = """\
#[[[cog
cog.outl('hello')
#]]]
#[[[end]]]
#[[[end]]]
"""
self.isBad(infile2, "infile.txt(5): Unexpected '[[[end]]]'")
def testNoEnd(self):
infile = """\
#[[[cog
cog.outl("hello")
#[[[end]]]
"""
self.isBad(infile, "infile.txt(3): Unexpected '[[[end]]]'")
infile2 = """\
#[[[cog
cog.outl('hello')
#]]]
#[[[end]]]
#[[[cog
cog.outl("hello")
#[[[end]]]
"""
self.isBad(infile2, "infile.txt(7): Unexpected '[[[end]]]'")
def testTwoBegins(self):
infile = """\
#[[[cog
#[[[cog
cog.outl("hello")
#]]]
#[[[end]]]
"""
self.isBad(infile, "infile.txt(2): Unexpected '[[[cog'")
infile2 = """\
#[[[cog
cog.outl("hello")
#]]]
#[[[end]]]
#[[[cog
#[[[cog
cog.outl("hello")
#]]]
#[[[end]]]
"""
self.isBad(infile2, "infile.txt(6): Unexpected '[[[cog'")
def testTwoEnds(self):
infile = """\
#[[[cog
cog.outl("hello")
#]]]
#]]]
#[[[end]]]
"""
self.isBad(infile, "infile.txt(4): Unexpected ']]]'")
infile2 = """\
#[[[cog
cog.outl("hello")
#]]]
#[[[end]]]
#[[[cog
cog.outl("hello")
#]]]
#]]]
#[[[end]]]
"""
self.isBad(infile2, "infile.txt(8): Unexpected ']]]'")
class CogErrorTests(TestCase):
""" Test cases for cog.error().
"""
def testErrorMsg(self):
infile = """\
[[[cog cog.error("This ain't right!")]]]
[[[end]]]
"""
infile = reindentBlock(infile)
with self.assertRaisesRegexp(CogGeneratedError, "This ain't right!"):
Cog().processString(infile)
def testErrorNoMsg(self):
infile = """\
[[[cog cog.error()]]]
[[[end]]]
"""
infile = reindentBlock(infile)
with self.assertRaisesRegexp(CogGeneratedError, "Error raised by cog generator."):
Cog().processString(infile)
def testNoErrorIfErrorNotCalled(self):
infile = """\
--[[[cog
--import cog
--for i in range(3):
-- if i > 10:
-- cog.error("Something is amiss!")
-- cog.out("xx%d\\n" % i)
--]]]
xx0
xx1
xx2
--[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(Cog().processString(infile), infile)
class CogGeneratorGetCodeTests(TestCase):
""" Unit tests against CogGenerator to see if its getCode() method works
properly.
"""
def setUp(self):
""" All tests get a generator to use, and short same-length names for
the functions we're going to use.
"""
self.gen = CogGenerator()
self.m = self.gen.parseMarker
self.l = self.gen.parseLine
def testEmpty(self):
self.m('// [[[cog')
self.m('// ]]]')
self.assertEqual(self.gen.getCode(), '')
def testSimple(self):
self.m('// [[[cog')
self.l(' print "hello"')
self.l(' print "bye"')
self.m('// ]]]')
self.assertEqual(self.gen.getCode(), 'print "hello"\nprint "bye"')
def testCompressed1(self):
# For a while, I supported compressed code blocks, but no longer.
self.m('// [[[cog: print """')
self.l('// hello')
self.l('// bye')
self.m('// """)]]]')
self.assertEqual(self.gen.getCode(), 'hello\nbye')
def testCompressed2(self):
# For a while, I supported compressed code blocks, but no longer.
self.m('// [[[cog: print """')
self.l('hello')
self.l('bye')
self.m('// """)]]]')
self.assertEqual(self.gen.getCode(), 'hello\nbye')
def testCompressed3(self):
# For a while, I supported compressed code blocks, but no longer.
self.m('// [[[cog')
self.l('print """hello')
self.l('bye')
self.m('// """)]]]')
self.assertEqual(self.gen.getCode(), 'print """hello\nbye')
def testCompressed4(self):
# For a while, I supported compressed code blocks, but no longer.
self.m('// [[[cog: print """')
self.l('hello')
self.l('bye""")')
self.m('// ]]]')
self.assertEqual(self.gen.getCode(), 'hello\nbye""")')
def testNoCommonPrefixForMarkers(self):
# It's important to be able to use #if 0 to hide lines from a
# C++ compiler.
self.m('#if 0 //[[[cog')
self.l('\timport cog, sys')
self.l('')
self.l('\tprint sys.argv')
self.m('#endif //]]]')
self.assertEqual(self.gen.getCode(), 'import cog, sys\n\nprint sys.argv')
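
# --- Illustrative sketch (not part of the original test suite) ------------
# The behavior these tests pin down: parseMarker()/parseLine() collect the
# raw lines, and getCode() strips the common leading whitespace, so inline
# generator code may be indented to match the host file. Standalone check:

def _demo_getcode_dedent():
    gen = CogGenerator()
    gen.parseMarker('// [[[cog')
    gen.parseLine('    for i in range(2):')
    gen.parseLine('        cog.out(str(i))')
    gen.parseMarker('// ]]]')
    assert gen.getCode() == 'for i in range(2):\n    cog.out(str(i))'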
class TestCaseWithTempDir(TestCase):
def newCog(self):
""" Initialize the cog members for another run.
"""
# Create a cog engine, and catch its output.
self.cog = Cog()
self.output = StringIO()
self.cog.setOutput(stdout=self.output, stderr=self.output)
def setUp(self):
# Create a temporary directory.
self.tempdir = os.path.join(tempfile.gettempdir(), 'testcog_tempdir_' + str(random.random())[2:])
os.mkdir(self.tempdir)
self.olddir = os.getcwd()
os.chdir(self.tempdir)
self.newCog()
def tearDown(self):
os.chdir(self.olddir)
# Get rid of the temporary directory.
shutil.rmtree(self.tempdir)
def assertFilesSame(self, sFName1, sFName2):
text1 = open(os.path.join(self.tempdir, sFName1), 'rb').read()
text2 = open(os.path.join(self.tempdir, sFName2), 'rb').read()
self.assertEqual(text1, text2)
def assertFileContent(self, sFName, sContent):
sAbsName = os.path.join(self.tempdir, sFName)
f = open(sAbsName, 'rb')
try:
sFileContent = f.read()
finally:
f.close()
self.assertEqual(sFileContent, to_bytes(sContent))
class ArgumentHandlingTests(TestCaseWithTempDir):
def testArgumentFailure(self):
# Return value 2 means usage problem.
self.assertEqual(self.cog.main(['argv0', '-j']), 2)
output = self.output.getvalue()
self.assertIn("option -j not recognized", output)
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0'])
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-j'])
def testNoDashOAndAtFile(self):
d = {
'cogfiles.txt': """\
# Please run cog
"""
}
makeFiles(d)
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-o', 'foo', '@cogfiles.txt'])
def testDashV(self):
self.assertEqual(self.cog.main(['argv0', '-v']), 0)
output = self.output.getvalue()
self.assertEqual('Cog version %s\n' % __version__, output)
def producesHelp(self, args):
self.newCog()
argv = ['argv0'] + args.split()
self.assertEqual(self.cog.main(argv), 0)
self.assertEqual(usage, self.output.getvalue())
def testDashH(self):
# -h or -? anywhere on the command line should just print help.
self.producesHelp("-h")
self.producesHelp("-?")
self.producesHelp("fooey.txt -h")
self.producesHelp("-o -r @fooey.txt -? @booey.txt")
def testDashOAndDashR(self):
d = {
'cogfile.txt': """\
# Please run cog
"""
}
makeFiles(d)
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-o', 'foo', '-r', 'cogfile.txt'])
def testDashZ(self):
d = {
'test.cog': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
""",
'test.out': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
void DoSomething();
void DoAnotherThing();
void DoLastThing();
""",
}
makeFiles(d)
with self.assertRaisesRegexp(CogError, re.escape("test.cog(6): Missing '[[[end]]]' before end of file.")):
self.cog.callableMain(['argv0', '-r', 'test.cog'])
self.newCog()
self.cog.callableMain(['argv0', '-r', '-z', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testBadDashD(self):
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-Dfooey', 'cog.txt'])
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-D', 'fooey', 'cog.txt'])
class TestFileHandling(TestCaseWithTempDir):
def testSimple(self):
d = {
'test.cog': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
//[[[end]]]
""",
'test.out': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
void DoSomething();
void DoAnotherThing();
void DoLastThing();
//[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
def testOutputFile(self):
# -o sets the output file.
d = {
'test.cog': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
//[[[end]]]
""",
'test.out': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
void DoSomething();
void DoAnotherThing();
void DoLastThing();
//[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-o', 'test.cogged', 'test.cog'])
self.assertFilesSame('test.cogged', 'test.out')
def testAtFile(self):
d = {
'one.cog': """\
//[[[cog
cog.outl("hello world")
//]]]
//[[[end]]]
""",
'one.out': """\
//[[[cog
cog.outl("hello world")
//]]]
hello world
//[[[end]]]
""",
'two.cog': """\
//[[[cog
cog.outl("goodbye cruel world")
//]]]
//[[[end]]]
""",
'two.out': """\
//[[[cog
cog.outl("goodbye cruel world")
//]]]
goodbye cruel world
//[[[end]]]
""",
'cogfiles.txt': """\
# Please run cog
one.cog
two.cog
"""
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '@cogfiles.txt'])
self.assertFilesSame('one.cog', 'one.out')
self.assertFilesSame('two.cog', 'two.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
def testNestedAtFile(self):
d = {
'one.cog': """\
//[[[cog
cog.outl("hello world")
//]]]
//[[[end]]]
""",
'one.out': """\
//[[[cog
cog.outl("hello world")
//]]]
hello world
//[[[end]]]
""",
'two.cog': """\
//[[[cog
cog.outl("goodbye cruel world")
//]]]
//[[[end]]]
""",
'two.out': """\
//[[[cog
cog.outl("goodbye cruel world")
//]]]
goodbye cruel world
//[[[end]]]
""",
'cogfiles.txt': """\
# Please run cog
one.cog
@cogfiles2.txt
""",
'cogfiles2.txt': """\
# This one too, please.
two.cog
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '@cogfiles.txt'])
self.assertFilesSame('one.cog', 'one.out')
self.assertFilesSame('two.cog', 'two.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
def testAtFileWithArgs(self):
d = {
'both.cog': """\
//[[[cog
cog.outl("one: %s" % ('one' in globals()))
cog.outl("two: %s" % ('two' in globals()))
//]]]
//[[[end]]]
""",
'one.out': """\
//[[[cog
cog.outl("one: %s" % ('one' in globals()))
cog.outl("two: %s" % ('two' in globals()))
//]]]
one: True // ONE
two: False // ONE
//[[[end]]]
""",
'two.out': """\
//[[[cog
cog.outl("one: %s" % ('one' in globals()))
cog.outl("two: %s" % ('two' in globals()))
//]]]
one: False // TWO
two: True // TWO
//[[[end]]]
""",
'cogfiles.txt': """\
# Please run cog
both.cog -o both.one -s ' // ONE' -D one=x
both.cog -o both.two -s ' // TWO' -D two=x
"""
}
makeFiles(d)
self.cog.callableMain(['argv0', '@cogfiles.txt'])
self.assertFilesSame('both.one', 'one.out')
self.assertFilesSame('both.two', 'two.out')
def testAtFileWithBadArgCombo(self):
d = {
'both.cog': """\
//[[[cog
cog.outl("one: %s" % ('one' in globals()))
cog.outl("two: %s" % ('two' in globals()))
//]]]
//[[[end]]]
""",
'cogfiles.txt': """\
# Please run cog
both.cog
both.cog -d # This is bad: -r and -d
"""
}
makeFiles(d)
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-r', '@cogfiles.txt'])
def testAtFileWithTrickyFilenames(self):
def fix_backslashes(files_txt):
"""Make the contents of a files.txt sensitive to the platform."""
if sys.platform != "win32":
files_txt = files_txt.replace("\\", "/")
return files_txt
d = {
'one 1.cog': """\
//[[[cog cog.outl("hello world") ]]]
""",
'one.out': """\
//[[[cog cog.outl("hello world") ]]]
hello world //xxx
""",
'subdir': {
'subback.cog': """\
//[[[cog cog.outl("down deep with backslashes") ]]]
""",
'subfwd.cog': """\
//[[[cog cog.outl("down deep with slashes") ]]]
""",
},
'subback.out': """\
//[[[cog cog.outl("down deep with backslashes") ]]]
down deep with backslashes //yyy
""",
'subfwd.out': """\
//[[[cog cog.outl("down deep with slashes") ]]]
down deep with slashes //zzz
""",
'cogfiles.txt': fix_backslashes("""\
# Please run cog
'one 1.cog' -s ' //xxx'
subdir\\subback.cog -s ' //yyy'
subdir/subfwd.cog -s ' //zzz'
""")
}
makeFiles(d)
self.cog.callableMain(['argv0', '-z', '-r', '@cogfiles.txt'])
self.assertFilesSame('one 1.cog', 'one.out')
self.assertFilesSame('subdir/subback.cog', 'subback.out')
self.assertFilesSame('subdir/subfwd.cog', 'subfwd.out')
class CogTestLineEndings(TestCaseWithTempDir):
"""Tests for -U option (force LF line-endings in output)."""
lines_in = ['Some text.',
'//[[[cog',
'cog.outl("Cog text")',
'//]]]',
'gobbledegook.',
'//[[[end]]]',
'epilogue.',
'']
lines_out = ['Some text.',
'//[[[cog',
'cog.outl("Cog text")',
'//]]]',
'Cog text',
'//[[[end]]]',
'epilogue.',
'']
def testOutputNativeEol(self):
makeFiles({'infile': '\n'.join(self.lines_in)})
self.cog.callableMain(['argv0', '-o', 'outfile', 'infile'])
self.assertFileContent('outfile', os.linesep.join(self.lines_out))
def testOutputLfEol(self):
makeFiles({'infile': '\n'.join(self.lines_in)})
self.cog.callableMain(['argv0', '-U', '-o', 'outfile', 'infile'])
self.assertFileContent('outfile', '\n'.join(self.lines_out))
def testReplaceNativeEol(self):
makeFiles({'test.cog': '\n'.join(self.lines_in)})
self.cog.callableMain(['argv0', '-r', 'test.cog'])
self.assertFileContent('test.cog', os.linesep.join(self.lines_out))
def testReplaceLfEol(self):
makeFiles({'test.cog': '\n'.join(self.lines_in)})
self.cog.callableMain(['argv0', '-U', '-r', 'test.cog'])
self.assertFileContent('test.cog', '\n'.join(self.lines_out))
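
# --- Illustrative sketch (not part of the original test suite) ------------
# Contract pinned by the four tests above: cog writes os.linesep by default
# and plain '\n' when -U is given. The same normalization in isolation (an
# assumed simplification of cog's actual output handling):

def _normalize_eol(text, unix_newlines):
    eol = '\n' if unix_newlines else os.linesep
    return eol.join(text.splitlines()) + eol

assert _normalize_eol('a\r\nb\r\n', unix_newlines=True) == 'a\nb\n'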
class CogTestCharacterEncoding(TestCaseWithTempDir):
def testSimple(self):
d = {
'test.cog': b("""\
// This is my C++ file.
//[[[cog
cog.outl("// Unicode: \xe1\x88\xb4 (U+1234)")
//]]]
//[[[end]]]
"""),
'test.out': b("""\
// This is my C++ file.
//[[[cog
cog.outl("// Unicode: \xe1\x88\xb4 (U+1234)")
//]]]
// Unicode: \xe1\x88\xb4 (U+1234)
//[[[end]]]
"""),
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
def testFileEncodingOption(self):
d = {
'test.cog': b("""\
// \xca\xee\xe4\xe8\xf0\xe2\xea\xe0 Windows
//[[[cog
cog.outl("\xd1\xfa\xe5\xf8\xfc \xe5\xf9\xb8 \xfd\xf2\xe8\xf5 \xec\xff\xe3\xea\xe8\xf5 \xf4\xf0\xe0\xed\xf6\xf3\xe7\xf1\xea\xe8\xf5 \xe1\xf3\xeb\xee\xea \xe4\xe0 \xe2\xfb\xef\xe5\xe9 \xf7\xe0\xfe")
//]]]
//[[[end]]]
"""),
'test.out': b("""\
// \xca\xee\xe4\xe8\xf0\xe2\xea\xe0 Windows
//[[[cog
cog.outl("\xd1\xfa\xe5\xf8\xfc \xe5\xf9\xb8 \xfd\xf2\xe8\xf5 \xec\xff\xe3\xea\xe8\xf5 \xf4\xf0\xe0\xed\xf6\xf3\xe7\xf1\xea\xe8\xf5 \xe1\xf3\xeb\xee\xea \xe4\xe0 \xe2\xfb\xef\xe5\xe9 \xf7\xe0\xfe")
//]]]
\xd1\xfa\xe5\xf8\xfc \xe5\xf9\xb8 \xfd\xf2\xe8\xf5 \xec\xff\xe3\xea\xe8\xf5 \xf4\xf0\xe0\xed\xf6\xf3\xe7\xf1\xea\xe8\xf5 \xe1\xf3\xeb\xee\xea \xe4\xe0 \xe2\xfb\xef\xe5\xe9 \xf7\xe0\xfe
//[[[end]]]
"""),
}
makeFiles(d)
self.cog.callableMain(['argv0', '-n', 'cp1251', '-r', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
class TestCaseWithImports(TestCaseWithTempDir):
""" When running tests which import modules, the sys.modules list
leaks from one test to the next. This test case class scrubs
the list after each run to keep the tests isolated from each other.
"""
def setUp(self):
TestCaseWithTempDir.setUp(self)
self.sysmodulekeys = list(sys.modules)
def tearDown(self):
modstoscrub = [
modname
for modname in sys.modules
if modname not in self.sysmodulekeys
]
for modname in modstoscrub:
del sys.modules[modname]
TestCaseWithTempDir.tearDown(self)
class CogIncludeTests(TestCaseWithImports):
dincludes = {
'test.cog': """\
//[[[cog
import mymodule
//]]]
//[[[end]]]
""",
'test.out': """\
//[[[cog
import mymodule
//]]]
Hello from mymodule
//[[[end]]]
""",
'test2.out': """\
//[[[cog
import mymodule
//]]]
Hello from mymodule in inc2
//[[[end]]]
""",
'include': {
'mymodule.py': """\
import cog
cog.outl("Hello from mymodule")
"""
},
'inc2': {
'mymodule.py': """\
import cog
cog.outl("Hello from mymodule in inc2")
"""
},
'inc3': {
'someothermodule.py': """\
import cog
cog.outl("This is some other module.")
"""
},
}
def testNeedIncludePath(self):
# Try it without the -I, to see that an ImportError happens.
makeFiles(self.dincludes)
with self.assertRaises(ImportError):
self.cog.callableMain(['argv0', '-r', 'test.cog'])
def testIncludePath(self):
# Test that -I adds include directories properly.
makeFiles(self.dincludes)
self.cog.callableMain(['argv0', '-r', '-I', 'include', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testTwoIncludePaths(self):
# Test that two -I's add include directories properly.
makeFiles(self.dincludes)
self.cog.callableMain(['argv0', '-r', '-I', 'include', '-I', 'inc2', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testTwoIncludePaths2(self):
# Test that two -I's add include directories properly.
makeFiles(self.dincludes)
self.cog.callableMain(['argv0', '-r', '-I', 'inc2', '-I', 'include', 'test.cog'])
self.assertFilesSame('test.cog', 'test2.out')
def testUselessIncludePath(self):
# Test that the search will continue past the first directory.
makeFiles(self.dincludes)
self.cog.callableMain(['argv0', '-r', '-I', 'inc3', '-I', 'include', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testSysPathIsUnchanged(self):
d = {
'bad.cog': """\
//[[[cog cog.error("Oh no!") ]]]
//[[[end]]]
""",
'good.cog': """\
//[[[cog cog.outl("Oh yes!") ]]]
//[[[end]]]
""",
}
makeFiles(d)
# Is it unchanged just by creating a cog engine?
oldsyspath = sys.path[:]
self.newCog()
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a successful run?
self.newCog()
self.cog.callableMain(['argv0', '-r', 'good.cog'])
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a successful run with includes?
self.newCog()
self.cog.callableMain(['argv0', '-r', '-I', 'xyzzy', 'good.cog'])
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a successful run with two includes?
self.newCog()
self.cog.callableMain(['argv0', '-r', '-I', 'xyzzy', '-I', 'quux', 'good.cog'])
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a failed run?
self.newCog()
with self.assertRaises(CogError):
self.cog.callableMain(['argv0', '-r', 'bad.cog'])
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a failed run with includes?
self.newCog()
with self.assertRaises(CogError):
self.cog.callableMain(['argv0', '-r', '-I', 'xyzzy', 'bad.cog'])
self.assertEqual(oldsyspath, sys.path)
# Is it unchanged for a failed run with two includes?
self.newCog()
with self.assertRaises(CogError):
self.cog.callableMain(['argv0', '-r', '-I', 'xyzzy', '-I', 'quux', 'bad.cog'])
self.assertEqual(oldsyspath, sys.path)
def testSubDirectories(self):
# Test that relative paths on the command line work, with includes.
d = {
'code': {
'test.cog': """\
//[[[cog
import mysubmodule
//]]]
//[[[end]]]
""",
'test.out': """\
//[[[cog
import mysubmodule
//]]]
Hello from mysubmodule
//[[[end]]]
""",
'mysubmodule.py': """\
import cog
cog.outl("Hello from mysubmodule")
"""
}
}
makeFiles(d)
# We should be able to invoke cog without the -I switch, and it will
# auto-include the current directory
self.cog.callableMain(['argv0', '-r', 'code/test.cog'])
self.assertFilesSame('code/test.cog', 'code/test.out')
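
# --- Illustrative sketch (not part of the original test suite) ------------
# Invariant behind testSysPathIsUnchanged above: -I directories may be
# pushed onto sys.path while a file is cogged, but sys.path must be
# restored afterwards, on success or failure alike. The canonical shape of
# such a guard (an assumption about cog's internals, shown only to
# document the invariant):

def _with_syspath(extra_dirs, fn):
    old_path = sys.path[:]
    sys.path[0:0] = extra_dirs
    try:
        return fn()
    finally:
        sys.path = old_path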
class CogTestsInFiles(TestCaseWithTempDir):
def testWarnIfNoCogCode(self):
# Test that the -e switch warns if there is no Cog code.
d = {
'with.cog': """\
//[[[cog
cog.outl("hello world")
//]]]
hello world
//[[[end]]]
""",
'without.cog': """\
There's no cog
code in this file.
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-e', 'with.cog'])
output = self.output.getvalue()
self.assertNotIn("Warning", output)
self.newCog()
self.cog.callableMain(['argv0', '-e', 'without.cog'])
output = self.output.getvalue()
self.assertIn("Warning: no cog code found in without.cog", output)
self.newCog()
self.cog.callableMain(['argv0', 'without.cog'])
output = self.output.getvalue()
self.assertNotIn("Warning", output)
def testFileNameProps(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This is %s in, %s out" % (cog.inFile, cog.outFile))
//]]]
this is cog1.txt in, cog1.txt out
[[[end]]]
""",
'cog1.out': """\
//[[[cog
cog.outl("This is %s in, %s out" % (cog.inFile, cog.outFile))
//]]]
This is cog1.txt in, cog1.txt out
[[[end]]]
""",
'cog1out.out': """\
//[[[cog
cog.outl("This is %s in, %s out" % (cog.inFile, cog.outFile))
//]]]
This is cog1.txt in, cog1out.txt out
[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
self.newCog()
self.cog.callableMain(['argv0', '-o', 'cog1out.txt', 'cog1.txt'])
self.assertFilesSame('cog1out.txt', 'cog1out.out')
def testGlobalsDontCrossFiles(self):
# Make sure that global values don't get shared between files.
d = {
'one.cog': """\
//[[[cog s = "This was set in one.cog" ]]]
//[[[end]]]
//[[[cog cog.outl(s) ]]]
//[[[end]]]
""",
'one.out': """\
//[[[cog s = "This was set in one.cog" ]]]
//[[[end]]]
//[[[cog cog.outl(s) ]]]
This was set in one.cog
//[[[end]]]
""",
'two.cog': """\
//[[[cog
try:
cog.outl(s)
except NameError:
cog.outl("s isn't set!")
//]]]
//[[[end]]]
""",
'two.out': """\
//[[[cog
try:
cog.outl(s)
except NameError:
cog.outl("s isn't set!")
//]]]
s isn't set!
//[[[end]]]
""",
'cogfiles.txt': """\
# Please run cog
one.cog
two.cog
"""
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '@cogfiles.txt'])
self.assertFilesSame('one.cog', 'one.out')
self.assertFilesSame('two.cog', 'two.out')
output = self.output.getvalue()
self.assertIn("(changed)", output)
def testRemoveGeneratedOutput(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was generated.")
//]]]
This line was generated.
//[[[end]]]
This line was not.
""",
'cog1.out': """\
//[[[cog
cog.outl("This line was generated.")
//]]]
//[[[end]]]
This line was not.
""",
'cog1.out2': """\
//[[[cog
cog.outl("This line was generated.")
//]]]
This line was generated.
//[[[end]]]
This line was not.
""",
}
makeFiles(d)
# Remove generated output.
self.cog.callableMain(['argv0', '-r', '-x', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
self.newCog()
# Regenerate the generated output.
self.cog.callableMain(['argv0', '-r', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out2')
self.newCog()
# Remove the generated output again.
self.cog.callableMain(['argv0', '-r', '-x', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
def testMsgCall(self):
infile = """\
#[[[cog
cog.msg("Hello there!")
#]]]
#[[[end]]]
"""
infile = reindentBlock(infile)
self.assertEqual(self.cog.processString(infile), infile)
output = self.output.getvalue()
self.assertEqual(output, "Message: Hello there!\n")
def testErrorMessageHasNoTraceback(self):
# Test that a Cog error is printed to stderr with no traceback.
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
Xhis line was newly
generated by cog
blah blah.
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
}
makeFiles(d)
stderr = StringIO()
self.cog.setOutput(stderr=stderr)
self.cog.main(['argv0', '-c', '-r', "cog1.txt"])
output = self.output.getvalue()
self.assertEqual(self.output.getvalue(), "Cogging cog1.txt\n")
self.assertEqual(stderr.getvalue(), "cog1.txt(9): Output has been edited! Delete old checksum to unprotect.\n")
def testDashD(self):
d = {
'test.cog': """\
--[[[cog cog.outl("Defined fooey as " + fooey) ]]]
--[[[end]]]
""",
'test.kablooey': """\
--[[[cog cog.outl("Defined fooey as " + fooey) ]]]
Defined fooey as kablooey
--[[[end]]]
""",
'test.einstein': """\
--[[[cog cog.outl("Defined fooey as " + fooey) ]]]
Defined fooey as e=mc2
--[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-D', 'fooey=kablooey', 'test.cog'])
self.assertFilesSame('test.cog', 'test.kablooey')
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-Dfooey=kablooey', 'test.cog'])
self.assertFilesSame('test.cog', 'test.kablooey')
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-Dfooey=e=mc2', 'test.cog'])
self.assertFilesSame('test.cog', 'test.einstein')
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-Dbar=quux', '-Dfooey=kablooey', 'test.cog'])
self.assertFilesSame('test.cog', 'test.kablooey')
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-Dfooey=kablooey', '-Dbar=quux', 'test.cog'])
self.assertFilesSame('test.cog', 'test.kablooey')
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-Dfooey=gooey', '-Dfooey=kablooey', 'test.cog'])
self.assertFilesSame('test.cog', 'test.kablooey')
def testOutputToStdout(self):
d = {
'test.cog': """\
--[[[cog cog.outl('Hey there!') ]]]
--[[[end]]]
"""
}
makeFiles(d)
stderr = StringIO()
self.cog.setOutput(stderr=stderr)
self.cog.callableMain(['argv0', 'test.cog'])
output = self.output.getvalue()
outerr = stderr.getvalue()
self.assertEqual(output, "--[[[cog cog.outl('Hey there!') ]]]\nHey there!\n--[[[end]]]\n")
self.assertEqual(outerr, "")
def testReadFromStdin(self):
stdin = StringIO("--[[[cog cog.outl('Wow') ]]]\n--[[[end]]]\n")
def restore_stdin(old_stdin):
sys.stdin = old_stdin
self.addCleanup(restore_stdin, sys.stdin)
sys.stdin = stdin
stderr = StringIO()
self.cog.setOutput(stderr=stderr)
self.cog.callableMain(['argv0', '-'])
output = self.output.getvalue()
outerr = stderr.getvalue()
self.assertEqual(output, "--[[[cog cog.outl('Wow') ]]]\nWow\n--[[[end]]]\n")
self.assertEqual(outerr, "")
def testSuffixOutputLines(self):
d = {
'test.cog': """\
Hey there.
;[[[cog cog.outl('a\\nb\\n \\nc') ]]]
;[[[end]]]
Good bye.
""",
'test.out': """\
Hey there.
;[[[cog cog.outl('a\\nb\\n \\nc') ]]]
a (foo)
b (foo)
""" # These three trailing spaces are important.
# The suffix is not applied to completely blank lines.
"""
c (foo)
;[[[end]]]
Good bye.
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-s', ' (foo)', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testEmptySuffix(self):
d = {
'test.cog': """\
;[[[cog cog.outl('a\\nb\\nc') ]]]
;[[[end]]]
""",
'test.out': """\
;[[[cog cog.outl('a\\nb\\nc') ]]]
a
b
c
;[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-s', '', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
def testHellishSuffix(self):
d = {
'test.cog': """\
;[[[cog cog.outl('a\\n\\nb') ]]]
""",
'test.out': """\
;[[[cog cog.outl('a\\n\\nb') ]]]
a /\\n*+([)]><
b /\\n*+([)]><
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-z', '-r', '-s', r' /\n*+([)]><', 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
class WritabilityTests(TestCaseWithTempDir):
d = {
'test.cog': """\
//[[[cog
for fn in ['DoSomething', 'DoAnotherThing', 'DoLastThing']:
cog.outl("void %s();" % fn)
//]]]
//[[[end]]]
""",
'test.out': """\
//[[[cog
for fn in ['DoSomething', 'DoAnotherThing', 'DoLastThing']:
cog.outl("void %s();" % fn)
//]]]
void DoSomething();
void DoAnotherThing();
void DoLastThing();
//[[[end]]]
""",
}
if os.name == 'nt': #pragma: no cover
# for Windows
cmd_w_args = 'attrib -R %s'
cmd_w_asterisk = 'attrib -R *'
else: #pragma: no cover
# for unix-like
cmd_w_args = 'chmod +w %s'
cmd_w_asterisk = 'chmod +w *'
def setUp(self):
TestCaseWithTempDir.setUp(self)
makeFiles(self.d)
self.testcog = os.path.join(self.tempdir, 'test.cog')
os.chmod(self.testcog, stat.S_IREAD) # Make the file readonly.
assert not os.access(self.testcog, os.W_OK)
def tearDown(self):
os.chmod(self.testcog, stat.S_IWRITE) # Make the file writable again.
TestCaseWithTempDir.tearDown(self)
def testReadonlyNoCommand(self):
with self.assertRaisesRegexp(CogError, "Can't overwrite test.cog"):
self.cog.callableMain(['argv0', '-r', 'test.cog'])
assert not os.access(self.testcog, os.W_OK)
def testReadonlyWithCommand(self):
self.cog.callableMain(['argv0', '-r', '-w', self.cmd_w_args, 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
assert os.access(self.testcog, os.W_OK)
def testReadonlyWithCommandWithNoSlot(self):
self.cog.callableMain(['argv0', '-r', '-w', self.cmd_w_asterisk, 'test.cog'])
self.assertFilesSame('test.cog', 'test.out')
assert os.access(self.testcog, os.W_OK)
def testReadonlyWithIneffectualCommand(self):
with self.assertRaisesRegexp(CogError, "Couldn't make test.cog writable"):
self.cog.callableMain(['argv0', '-r', '-w', 'echo %s', 'test.cog'])
assert not os.access(self.testcog, os.W_OK)
class ChecksumTests(TestCaseWithTempDir):
def testCreateChecksumOutput(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was generated.")
//]]]
This line was generated.
//[[[end]]]
This line was not.
""",
'cog1.out': """\
//[[[cog
cog.outl("This line was generated.")
//]]]
This line was generated.
//[[[end]]] (checksum: 8adb13fb59b996a1c7f0065ea9f3d893)
This line was not.
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-c', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
def testCheckChecksumOutput(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was generated.
//[[[end]]] (checksum: 8adb13fb59b996a1c7f0065ea9f3d893)
""",
'cog1.out': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah.
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', '-c', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
def testRemoveChecksumOutput(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was generated.
//[[[end]]] (checksum: 8adb13fb59b996a1c7f0065ea9f3d893) fooey
""",
'cog1.out': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah.
//[[[end]]] fooey
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-r', 'cog1.txt'])
self.assertFilesSame('cog1.txt', 'cog1.out')
def testTamperedChecksumOutput(self):
d = {
'cog1.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
Xhis line was newly
generated by cog
blah blah.
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
'cog2.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah!
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
'cog3.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah.
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
'cog4.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah..
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
'cog5.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
This line was newly
generated by cog
blah blah.
extra
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
'cog6.txt': """\
//[[[cog
cog.outl("This line was newly")
cog.outl("generated by cog")
cog.outl("blah blah.")
//]]]
//[[[end]]] (checksum: a8540982e5ad6b95c9e9a184b26f4346)
""",
}
makeFiles(d)
with self.assertRaisesRegexp(CogError,
r"cog1.txt\(9\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog1.txt"])
with self.assertRaisesRegexp(CogError,
r"cog2.txt\(9\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog2.txt"])
with self.assertRaisesRegexp(CogError,
r"cog3.txt\(10\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog3.txt"])
with self.assertRaisesRegexp(CogError,
r"cog4.txt\(9\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog4.txt"])
with self.assertRaisesRegexp(CogError,
r"cog5.txt\(10\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog5.txt"])
with self.assertRaisesRegexp(CogError,
r"cog6.txt\(6\): Output has been edited! Delete old checksum to unprotect."):
self.cog.callableMain(['argv0', '-c', "cog6.txt"])
def testArgvIsntModified(self):
argv = ['argv0', '-v']
orig_argv = argv[:]
self.cog.callableMain(argv)
self.assertEqual(argv, orig_argv)
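
# --- Illustrative sketch (not part of the original test suite) ------------
# The fixtures above suggest the (checksum: ...) marker is an MD5 digest of
# the generated block, which is why every one-character edit trips the
# "Output has been edited!" error. A hedged reconstruction (the exact
# hashing scheme is an assumption about cog's internals):

import hashlib

def _output_checksum(generated_text):
    return hashlib.md5(generated_text.encode('utf-8')).hexdigest()

# If the assumption holds, _output_checksum("This line was generated.\n")
# reproduces the 8adb13fb... value used in testCreateChecksumOutput.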
class CustomDelimiterTests(TestCaseWithTempDir):
def testCustomDelimiters(self):
d = {
'test.cog': """\
//{{cog
cog.outl("void %s();" % "MyFunction")
//}}
//{{end}}
""",
'test.out': """\
//{{cog
cog.outl("void %s();" % "MyFunction")
//}}
void MyFunction();
//{{end}}
""",
}
makeFiles(d)
self.cog.callableMain([
'argv0', '-r',
'--begin-spec={{', '--end-spec=}}', '--end-output={{end}}',
'test.cog'
])
self.assertFilesSame('test.cog', 'test.out')
def testTrulyWackyDelimiters(self):
# Make sure the delimiters are properly re-escaped.
d = {
'test.cog': """\
//**(cog
cog.outl("void %s();" % "MyFunction")
//**)
//**(end)**
""",
'test.out': """\
//**(cog
cog.outl("void %s();" % "MyFunction")
//**)
void MyFunction();
//**(end)**
""",
}
makeFiles(d)
self.cog.callableMain([
'argv0', '-r',
'--begin-spec=**(', '--end-spec=**)', '--end-output=**(end)**',
'test.cog'
])
self.assertFilesSame('test.cog', 'test.out')
def testChangeJustOneDelimiter(self):
d = {
'test.cog': """\
//**(cog
cog.outl("void %s();" % "MyFunction")
//]]]
//[[[end]]]
""",
'test.out': """\
//**(cog
cog.outl("void %s();" % "MyFunction")
//]]]
void MyFunction();
//[[[end]]]
""",
}
makeFiles(d)
self.cog.callableMain([
'argv0', '-r',
'--begin-spec=**(',
'test.cog'
])
self.assertFilesSame('test.cog', 'test.out')
class BlakeTests(TestCaseWithTempDir):
# Blake Winton's contributions.
def testDeleteCode(self):
# -o sets the output file.
d = {
'test.cog': """\
// This is my C++ file.
//[[[cog
fnames = ['DoSomething', 'DoAnotherThing', 'DoLastThing']
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
Some Sample Code Here
//[[[end]]]Data Data
And Some More
""",
'test.out': """\
// This is my C++ file.
void DoSomething();
void DoAnotherThing();
void DoLastThing();
And Some More
""",
}
makeFiles(d)
self.cog.callableMain(['argv0', '-d', '-o', 'test.cogged', 'test.cog'])
self.assertFilesSame('test.cogged', 'test.out')
def testDeleteCodeWithDashRFails(self):
d = {
'test.cog': """\
// This is my C++ file.
"""
}
makeFiles(d)
with self.assertRaises(CogUsageError):
self.cog.callableMain(['argv0', '-r', '-d', 'test.cog'])
def testSettingGlobals(self):
# Blake Winton contributed a way to set the globals that will be used in
# processFile().
d = {
'test.cog': """\
// This is my C++ file.
//[[[cog
for fn in fnames:
cog.outl("void %s();" % fn)
//]]]
Some Sample Code Here
//[[[end]]]""",
'test.out': """\
// This is my C++ file.
void DoBlake();
void DoWinton();
void DoContribution();
""",
}
makeFiles(d)
globals = {}
globals['fnames'] = ['DoBlake', 'DoWinton', 'DoContribution']
self.cog.options.bDeleteCode = True
self.cog.processFile('test.cog', 'test.cogged', globals=globals)
self.assertFilesSame('test.cogged', 'test.out')
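
# --- Illustrative sketch (not part of the original test suite) ------------
# processFile(..., globals=...) seeds the namespace the inline generator
# code runs in, which is why 'fnames' resolves in testSettingGlobals. The
# same mechanism with plain exec():

def _demo_seeded_globals():
    namespace = {'fnames': ['DoBlake', 'DoWinton']}
    exec("decls = ['void %s();' % fn for fn in fnames]", namespace)
    assert namespace['decls'] == ['void DoBlake();', 'void DoWinton();']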
class ErrorCallTests(TestCaseWithTempDir):
def testErrorCallHasNoTraceback(self):
# Test that cog.error() doesn't show a traceback.
d = {
'error.cog': """\
//[[[cog
cog.error("Something Bad!")
//]]]
//[[[end]]]
""",
}
makeFiles(d)
self.cog.main(['argv0', '-r', 'error.cog'])
output = self.output.getvalue()
self.assertEqual(output, "Cogging error.cog\nError: Something Bad!\n")
def testRealErrorHasTraceback(self):
# Test that a genuine error does show a traceback.
d = {
'error.cog': """\
//[[[cog
raise RuntimeError("Hey!")
//]]]
//[[[end]]]
""",
}
makeFiles(d)
self.cog.main(['argv0', '-r', 'error.cog'])
output = self.output.getvalue()
msg = 'Actual output:\n' + output
self.assert_(output.startswith("Cogging error.cog\nTraceback (most recent"), msg)
self.assertIn("RuntimeError: Hey!", output)
# Things not yet tested:
# - A bad -w command (currently fails silently).
| mpl-2.0 | -2,230,558,047,872,296,200 | 29.733703 | 212 | 0.44122 | false |
Cinntax/home-assistant | homeassistant/components/switch/__init__.py | 1 | 3209 | """Component to interface with switches that can be controlled remotely."""
from datetime import timedelta
import logging

import voluptuous as vol

from homeassistant.loader import bind_hass
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.config_validation import (  # noqa
    PLATFORM_SCHEMA,
    PLATFORM_SCHEMA_BASE,
)
from homeassistant.const import (
    STATE_ON,
    SERVICE_TURN_ON,
    SERVICE_TURN_OFF,
    SERVICE_TOGGLE,
)
from homeassistant.components import group

# mypy: allow-untyped-defs, no-check-untyped-defs

DOMAIN = "switch"
SCAN_INTERVAL = timedelta(seconds=30)

GROUP_NAME_ALL_SWITCHES = "all switches"
ENTITY_ID_ALL_SWITCHES = group.ENTITY_ID_FORMAT.format("all_switches")

ENTITY_ID_FORMAT = DOMAIN + ".{}"

ATTR_TODAY_ENERGY_KWH = "today_energy_kwh"
ATTR_CURRENT_POWER_W = "current_power_w"

MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)

PROP_TO_ATTR = {
    "current_power_w": ATTR_CURRENT_POWER_W,
    "today_energy_kwh": ATTR_TODAY_ENERGY_KWH,
}

DEVICE_CLASS_OUTLET = "outlet"
DEVICE_CLASS_SWITCH = "switch"

DEVICE_CLASSES = [DEVICE_CLASS_OUTLET, DEVICE_CLASS_SWITCH]

DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))

_LOGGER = logging.getLogger(__name__)


@bind_hass
def is_on(hass, entity_id=None):
    """Return if the switch is on based on the statemachine.

    Async friendly.
    """
    entity_id = entity_id or ENTITY_ID_ALL_SWITCHES
    return hass.states.is_state(entity_id, STATE_ON)


async def async_setup(hass, config):
    """Track states and offer events for switches."""
    component = hass.data[DOMAIN] = EntityComponent(
        _LOGGER, DOMAIN, hass, SCAN_INTERVAL, GROUP_NAME_ALL_SWITCHES
    )
    await component.async_setup(config)

    component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
    component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
    component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")

    return True


async def async_setup_entry(hass, entry):
    """Set up a config entry."""
    return await hass.data[DOMAIN].async_setup_entry(entry)


async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    return await hass.data[DOMAIN].async_unload_entry(entry)


class SwitchDevice(ToggleEntity):
    """Representation of a switch."""

    @property
    def current_power_w(self):
        """Return the current power usage in W."""
        return None

    @property
    def today_energy_kwh(self):
        """Return the total energy usage today in kWh."""
        return None

    @property
    def is_standby(self):
        """Return true if device is in standby."""
        return None

    @property
    def state_attributes(self):
        """Return the optional state attributes."""
        data = {}

        for prop, attr in PROP_TO_ATTR.items():
            value = getattr(self, prop)
            if value:
                data[attr] = value

        return data

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return None
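

# --- Illustrative sketch (not part of the original component) -------------
# Minimal concrete entity built on SwitchDevice, to show the ToggleEntity
# contract a platform has to fill in (is_on plus turn_on/turn_off). The
# class name and its in-memory state handling are made up for illustration.

class _DemoSwitch(SwitchDevice):
    """In-memory switch used only to illustrate the entity contract."""

    def __init__(self, name):
        self._name = name
        self._state = False

    @property
    def name(self):
        """Return the display name of this switch."""
        return self._name

    @property
    def is_on(self):
        """Return true if the switch is on."""
        return self._state

    def turn_on(self, **kwargs):
        """Turn the switch on."""
        self._state = True

    def turn_off(self, **kwargs):
        """Turn the switch off."""
        self._state = False
# ---------------------------------------------------------------------------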
| apache-2.0 | 5,445,250,433,488,144,000 | 25.741667 | 83 | 0.684637 | false |
alec-heif/MIT-Thesis | spark-bin/python/pyspark/ml/param/shared.py | 15 | 22317 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# DO NOT MODIFY THIS FILE! It was generated by _shared_params_code_gen.py.
from pyspark.ml.param import *


class HasMaxIter(Params):
    """
    Mixin for param maxIter: max number of iterations (>= 0).
    """

    maxIter = Param(Params._dummy(), "maxIter", "max number of iterations (>= 0).", typeConverter=TypeConverters.toInt)

    def __init__(self):
        super(HasMaxIter, self).__init__()

    def setMaxIter(self, value):
        """
        Sets the value of :py:attr:`maxIter`.
        """
        return self._set(maxIter=value)

    def getMaxIter(self):
        """
        Gets the value of maxIter or its default value.
        """
        return self.getOrDefault(self.maxIter)


class HasRegParam(Params):
    """
    Mixin for param regParam: regularization parameter (>= 0).
    """

    regParam = Param(Params._dummy(), "regParam", "regularization parameter (>= 0).", typeConverter=TypeConverters.toFloat)

    def __init__(self):
        super(HasRegParam, self).__init__()

    def setRegParam(self, value):
        """
        Sets the value of :py:attr:`regParam`.
        """
        return self._set(regParam=value)

    def getRegParam(self):
        """
        Gets the value of regParam or its default value.
        """
        return self.getOrDefault(self.regParam)
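

# --- Illustrative sketch (not part of the generated file) -----------------
# How these generated mixins are consumed: an algorithm's Params class
# inherits several of them and gets the typed getter/setter pairs for free.
# 'DemoParams' below is hypothetical, not a real pyspark class.

class DemoParams(HasMaxIter, HasRegParam):
    def __init__(self):
        super(DemoParams, self).__init__()
        self._setDefault(maxIter=10, regParam=0.0)

# e.g. DemoParams().setMaxIter(20).getMaxIter() would return 20, since the
# set* methods return self after calling _set().
# ---------------------------------------------------------------------------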


class HasFeaturesCol(Params):
    """
    Mixin for param featuresCol: features column name.
    """

    featuresCol = Param(Params._dummy(), "featuresCol", "features column name.", typeConverter=TypeConverters.toString)

    def __init__(self):
        super(HasFeaturesCol, self).__init__()
        self._setDefault(featuresCol='features')

    def setFeaturesCol(self, value):
        """
        Sets the value of :py:attr:`featuresCol`.
        """
        return self._set(featuresCol=value)

    def getFeaturesCol(self):
        """
        Gets the value of featuresCol or its default value.
        """
        return self.getOrDefault(self.featuresCol)


class HasLabelCol(Params):
    """
    Mixin for param labelCol: label column name.
    """

    labelCol = Param(Params._dummy(), "labelCol", "label column name.", typeConverter=TypeConverters.toString)

    def __init__(self):
        super(HasLabelCol, self).__init__()
        self._setDefault(labelCol='label')

    def setLabelCol(self, value):
        """
        Sets the value of :py:attr:`labelCol`.
        """
        return self._set(labelCol=value)

    def getLabelCol(self):
        """
        Gets the value of labelCol or its default value.
        """
        return self.getOrDefault(self.labelCol)


class HasPredictionCol(Params):
    """
    Mixin for param predictionCol: prediction column name.
    """

    predictionCol = Param(Params._dummy(), "predictionCol", "prediction column name.", typeConverter=TypeConverters.toString)

    def __init__(self):
        super(HasPredictionCol, self).__init__()
        self._setDefault(predictionCol='prediction')

    def setPredictionCol(self, value):
        """
        Sets the value of :py:attr:`predictionCol`.
        """
        return self._set(predictionCol=value)

    def getPredictionCol(self):
        """
        Gets the value of predictionCol or its default value.
        """
        return self.getOrDefault(self.predictionCol)


class HasProbabilityCol(Params):
    """
    Mixin for param probabilityCol: Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.
    """

    probabilityCol = Param(Params._dummy(), "probabilityCol", "Column name for predicted class conditional probabilities. Note: Not all models output well-calibrated probability estimates! These probabilities should be treated as confidences, not precise probabilities.", typeConverter=TypeConverters.toString)

    def __init__(self):
        super(HasProbabilityCol, self).__init__()
        self._setDefault(probabilityCol='probability')

    def setProbabilityCol(self, value):
        """
        Sets the value of :py:attr:`probabilityCol`.
        """
        return self._set(probabilityCol=value)

    def getProbabilityCol(self):
        """
        Gets the value of probabilityCol or its default value.
        """
        return self.getOrDefault(self.probabilityCol)
class HasRawPredictionCol(Params):
"""
Mixin for param rawPredictionCol: raw prediction (a.k.a. confidence) column name.
"""
rawPredictionCol = Param(Params._dummy(), "rawPredictionCol", "raw prediction (a.k.a. confidence) column name.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasRawPredictionCol, self).__init__()
self._setDefault(rawPredictionCol='rawPrediction')
def setRawPredictionCol(self, value):
"""
Sets the value of :py:attr:`rawPredictionCol`.
"""
return self._set(rawPredictionCol=value)
def getRawPredictionCol(self):
"""
Gets the value of rawPredictionCol or its default value.
"""
return self.getOrDefault(self.rawPredictionCol)
class HasInputCol(Params):
"""
Mixin for param inputCol: input column name.
"""
inputCol = Param(Params._dummy(), "inputCol", "input column name.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasInputCol, self).__init__()
def setInputCol(self, value):
"""
Sets the value of :py:attr:`inputCol`.
"""
return self._set(inputCol=value)
def getInputCol(self):
"""
Gets the value of inputCol or its default value.
"""
return self.getOrDefault(self.inputCol)
class HasInputCols(Params):
"""
Mixin for param inputCols: input column names.
"""
inputCols = Param(Params._dummy(), "inputCols", "input column names.", typeConverter=TypeConverters.toListString)
def __init__(self):
super(HasInputCols, self).__init__()
def setInputCols(self, value):
"""
Sets the value of :py:attr:`inputCols`.
"""
return self._set(inputCols=value)
def getInputCols(self):
"""
Gets the value of inputCols or its default value.
"""
return self.getOrDefault(self.inputCols)
class HasOutputCol(Params):
"""
Mixin for param outputCol: output column name.
"""
outputCol = Param(Params._dummy(), "outputCol", "output column name.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasOutputCol, self).__init__()
self._setDefault(outputCol=self.uid + '__output')
def setOutputCol(self, value):
"""
Sets the value of :py:attr:`outputCol`.
"""
return self._set(outputCol=value)
def getOutputCol(self):
"""
Gets the value of outputCol or its default value.
"""
return self.getOrDefault(self.outputCol)
class HasNumFeatures(Params):
"""
Mixin for param numFeatures: number of features.
"""
numFeatures = Param(Params._dummy(), "numFeatures", "number of features.", typeConverter=TypeConverters.toInt)
def __init__(self):
super(HasNumFeatures, self).__init__()
def setNumFeatures(self, value):
"""
Sets the value of :py:attr:`numFeatures`.
"""
return self._set(numFeatures=value)
def getNumFeatures(self):
"""
Gets the value of numFeatures or its default value.
"""
return self.getOrDefault(self.numFeatures)
class HasCheckpointInterval(Params):
"""
Mixin for param checkpointInterval: set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.
"""
checkpointInterval = Param(Params._dummy(), "checkpointInterval", "set checkpoint interval (>= 1) or disable checkpoint (-1). E.g. 10 means that the cache will get checkpointed every 10 iterations.", typeConverter=TypeConverters.toInt)
def __init__(self):
super(HasCheckpointInterval, self).__init__()
def setCheckpointInterval(self, value):
"""
Sets the value of :py:attr:`checkpointInterval`.
"""
return self._set(checkpointInterval=value)
def getCheckpointInterval(self):
"""
Gets the value of checkpointInterval or its default value.
"""
return self.getOrDefault(self.checkpointInterval)
class HasSeed(Params):
"""
Mixin for param seed: random seed.
"""
seed = Param(Params._dummy(), "seed", "random seed.", typeConverter=TypeConverters.toInt)
def __init__(self):
super(HasSeed, self).__init__()
self._setDefault(seed=hash(type(self).__name__))
def setSeed(self, value):
"""
Sets the value of :py:attr:`seed`.
"""
return self._set(seed=value)
def getSeed(self):
"""
Gets the value of seed or its default value.
"""
return self.getOrDefault(self.seed)
class HasTol(Params):
"""
Mixin for param tol: the convergence tolerance for iterative algorithms (>= 0).
"""
tol = Param(Params._dummy(), "tol", "the convergence tolerance for iterative algorithms (>= 0).", typeConverter=TypeConverters.toFloat)
def __init__(self):
super(HasTol, self).__init__()
def setTol(self, value):
"""
Sets the value of :py:attr:`tol`.
"""
return self._set(tol=value)
def getTol(self):
"""
Gets the value of tol or its default value.
"""
return self.getOrDefault(self.tol)
class HasStepSize(Params):
"""
Mixin for param stepSize: Step size to be used for each iteration of optimization (>= 0).
"""
stepSize = Param(Params._dummy(), "stepSize", "Step size to be used for each iteration of optimization (>= 0).", typeConverter=TypeConverters.toFloat)
def __init__(self):
super(HasStepSize, self).__init__()
def setStepSize(self, value):
"""
Sets the value of :py:attr:`stepSize`.
"""
return self._set(stepSize=value)
def getStepSize(self):
"""
Gets the value of stepSize or its default value.
"""
return self.getOrDefault(self.stepSize)
class HasHandleInvalid(Params):
"""
Mixin for param handleInvalid: how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an error). More options may be added later.
"""
handleInvalid = Param(Params._dummy(), "handleInvalid", "how to handle invalid entries. Options are skip (which will filter out rows with bad values), or error (which will throw an error). More options may be added later.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasHandleInvalid, self).__init__()
def setHandleInvalid(self, value):
"""
Sets the value of :py:attr:`handleInvalid`.
"""
return self._set(handleInvalid=value)
def getHandleInvalid(self):
"""
Gets the value of handleInvalid or its default value.
"""
return self.getOrDefault(self.handleInvalid)
class HasElasticNetParam(Params):
"""
Mixin for param elasticNetParam: the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.
"""
elasticNetParam = Param(Params._dummy(), "elasticNetParam", "the ElasticNet mixing parameter, in range [0, 1]. For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty.", typeConverter=TypeConverters.toFloat)
def __init__(self):
super(HasElasticNetParam, self).__init__()
self._setDefault(elasticNetParam=0.0)
def setElasticNetParam(self, value):
"""
Sets the value of :py:attr:`elasticNetParam`.
"""
return self._set(elasticNetParam=value)
def getElasticNetParam(self):
"""
Gets the value of elasticNetParam or its default value.
"""
return self.getOrDefault(self.elasticNetParam)
class HasFitIntercept(Params):
"""
Mixin for param fitIntercept: whether to fit an intercept term.
"""
fitIntercept = Param(Params._dummy(), "fitIntercept", "whether to fit an intercept term.", typeConverter=TypeConverters.toBoolean)
def __init__(self):
super(HasFitIntercept, self).__init__()
self._setDefault(fitIntercept=True)
def setFitIntercept(self, value):
"""
Sets the value of :py:attr:`fitIntercept`.
"""
return self._set(fitIntercept=value)
def getFitIntercept(self):
"""
Gets the value of fitIntercept or its default value.
"""
return self.getOrDefault(self.fitIntercept)
class HasStandardization(Params):
"""
Mixin for param standardization: whether to standardize the training features before fitting the model.
"""
standardization = Param(Params._dummy(), "standardization", "whether to standardize the training features before fitting the model.", typeConverter=TypeConverters.toBoolean)
def __init__(self):
super(HasStandardization, self).__init__()
self._setDefault(standardization=True)
def setStandardization(self, value):
"""
Sets the value of :py:attr:`standardization`.
"""
return self._set(standardization=value)
def getStandardization(self):
"""
Gets the value of standardization or its default value.
"""
return self.getOrDefault(self.standardization)
class HasThresholds(Params):
"""
Mixin for param thresholds: Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values > 0, excepting that at most one value may be 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class's threshold.
"""
thresholds = Param(Params._dummy(), "thresholds", "Thresholds in multi-class classification to adjust the probability of predicting each class. Array must have length equal to the number of classes, with values > 0, excepting that at most one value may be 0. The class with largest value p/t is predicted, where p is the original probability of that class and t is the class's threshold.", typeConverter=TypeConverters.toListFloat)
def __init__(self):
super(HasThresholds, self).__init__()
def setThresholds(self, value):
"""
Sets the value of :py:attr:`thresholds`.
"""
return self._set(thresholds=value)
def getThresholds(self):
"""
Gets the value of thresholds or its default value.
"""
return self.getOrDefault(self.thresholds)
class HasWeightCol(Params):
"""
Mixin for param weightCol: weight column name. If this is not set or empty, we treat all instance weights as 1.0.
"""
weightCol = Param(Params._dummy(), "weightCol", "weight column name. If this is not set or empty, we treat all instance weights as 1.0.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasWeightCol, self).__init__()
def setWeightCol(self, value):
"""
Sets the value of :py:attr:`weightCol`.
"""
return self._set(weightCol=value)
def getWeightCol(self):
"""
Gets the value of weightCol or its default value.
"""
return self.getOrDefault(self.weightCol)
class HasSolver(Params):
"""
Mixin for param solver: the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.
"""
solver = Param(Params._dummy(), "solver", "the solver algorithm for optimization. If this is not set or empty, default value is 'auto'.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasSolver, self).__init__()
self._setDefault(solver='auto')
def setSolver(self, value):
"""
Sets the value of :py:attr:`solver`.
"""
return self._set(solver=value)
def getSolver(self):
"""
Gets the value of solver or its default value.
"""
return self.getOrDefault(self.solver)
class HasVarianceCol(Params):
"""
Mixin for param varianceCol: column name for the biased sample variance of prediction.
"""
varianceCol = Param(Params._dummy(), "varianceCol", "column name for the biased sample variance of prediction.", typeConverter=TypeConverters.toString)
def __init__(self):
super(HasVarianceCol, self).__init__()
def setVarianceCol(self, value):
"""
Sets the value of :py:attr:`varianceCol`.
"""
return self._set(varianceCol=value)
def getVarianceCol(self):
"""
Gets the value of varianceCol or its default value.
"""
return self.getOrDefault(self.varianceCol)
class HasAggregationDepth(Params):
"""
Mixin for param aggregationDepth: suggested depth for treeAggregate (>= 2).
"""
aggregationDepth = Param(Params._dummy(), "aggregationDepth", "suggested depth for treeAggregate (>= 2).", typeConverter=TypeConverters.toInt)
def __init__(self):
super(HasAggregationDepth, self).__init__()
self._setDefault(aggregationDepth=2)
def setAggregationDepth(self, value):
"""
Sets the value of :py:attr:`aggregationDepth`.
"""
return self._set(aggregationDepth=value)
def getAggregationDepth(self):
"""
Gets the value of aggregationDepth or its default value.
"""
return self.getOrDefault(self.aggregationDepth)
class DecisionTreeParams(Params):
"""
Mixin for Decision Tree parameters.
"""
maxDepth = Param(Params._dummy(), "maxDepth", "Maximum depth of the tree. (>= 0) E.g., depth 0 means 1 leaf node; depth 1 means 1 internal node + 2 leaf nodes.", typeConverter=TypeConverters.toInt)
maxBins = Param(Params._dummy(), "maxBins", "Max number of bins for discretizing continuous features. Must be >=2 and >= number of categories for any categorical feature.", typeConverter=TypeConverters.toInt)
minInstancesPerNode = Param(Params._dummy(), "minInstancesPerNode", "Minimum number of instances each child must have after split. If a split causes the left or right child to have fewer than minInstancesPerNode, the split will be discarded as invalid. Should be >= 1.", typeConverter=TypeConverters.toInt)
minInfoGain = Param(Params._dummy(), "minInfoGain", "Minimum information gain for a split to be considered at a tree node.", typeConverter=TypeConverters.toFloat)
maxMemoryInMB = Param(Params._dummy(), "maxMemoryInMB", "Maximum memory in MB allocated to histogram aggregation. If too small, then 1 node will be split per iteration, and its aggregates may exceed this size.", typeConverter=TypeConverters.toInt)
cacheNodeIds = Param(Params._dummy(), "cacheNodeIds", "If false, the algorithm will pass trees to executors to match instances with nodes. If true, the algorithm will cache node IDs for each instance. Caching can speed up training of deeper trees. Users can set how often should the cache be checkpointed or disable it by setting checkpointInterval.", typeConverter=TypeConverters.toBoolean)
def __init__(self):
super(DecisionTreeParams, self).__init__()
def setMaxDepth(self, value):
"""
Sets the value of :py:attr:`maxDepth`.
"""
return self._set(maxDepth=value)
def getMaxDepth(self):
"""
Gets the value of maxDepth or its default value.
"""
return self.getOrDefault(self.maxDepth)
def setMaxBins(self, value):
"""
Sets the value of :py:attr:`maxBins`.
"""
return self._set(maxBins=value)
def getMaxBins(self):
"""
Gets the value of maxBins or its default value.
"""
return self.getOrDefault(self.maxBins)
def setMinInstancesPerNode(self, value):
"""
Sets the value of :py:attr:`minInstancesPerNode`.
"""
return self._set(minInstancesPerNode=value)
def getMinInstancesPerNode(self):
"""
Gets the value of minInstancesPerNode or its default value.
"""
return self.getOrDefault(self.minInstancesPerNode)
def setMinInfoGain(self, value):
"""
Sets the value of :py:attr:`minInfoGain`.
"""
return self._set(minInfoGain=value)
def getMinInfoGain(self):
"""
Gets the value of minInfoGain or its default value.
"""
return self.getOrDefault(self.minInfoGain)
def setMaxMemoryInMB(self, value):
"""
Sets the value of :py:attr:`maxMemoryInMB`.
"""
return self._set(maxMemoryInMB=value)
def getMaxMemoryInMB(self):
"""
Gets the value of maxMemoryInMB or its default value.
"""
return self.getOrDefault(self.maxMemoryInMB)
def setCacheNodeIds(self, value):
"""
Sets the value of :py:attr:`cacheNodeIds`.
"""
return self._set(cacheNodeIds=value)
def getCacheNodeIds(self):
"""
Gets the value of cacheNodeIds or its default value.
"""
return self.getOrDefault(self.cacheNodeIds)
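# Usage sketch (illustrative; `_ToyParams` is a hypothetical class, assuming a
# working pyspark installation): these mixins are combined on estimators and
# transformers, then driven through the generated set/get accessors.
#
#     class _ToyParams(HasMaxIter, HasRegParam):
#         pass
#
#     tp = _ToyParams()
#     tp.setMaxIter(10).setRegParam(0.1)
#     assert tp.getMaxIter() == 10 and tp.getRegParam() == 0.1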
| mit | 3,261,565,614,167,070,700 | 32.111276 | 435 | 0.64704 | false |
Asana/boto | tests/unit/beanstalk/test_layer1.py | 114 | 6971 | #!/usr/bin/env python
import json
from tests.unit import AWSMockServiceTestCase
from boto.beanstalk.layer1 import Layer1
# These tests are just checking the basic structure of
# the Elastic Beanstalk code, by picking a few calls
# and verifying we get the expected results with mocked
# responses. The integration tests actually verify the
# API calls interact with the service correctly.
class TestListAvailableSolutionStacks(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return json.dumps(
{u'ListAvailableSolutionStacksResponse':
{u'ListAvailableSolutionStacksResult':
{u'SolutionStackDetails': [
{u'PermittedFileTypes': [u'war', u'zip'],
u'SolutionStackName': u'32bit Amazon Linux running Tomcat 7'},
{u'PermittedFileTypes': [u'zip'],
u'SolutionStackName': u'32bit Amazon Linux running PHP 5.3'}],
u'SolutionStacks': [u'32bit Amazon Linux running Tomcat 7',
u'32bit Amazon Linux running PHP 5.3']},
u'ResponseMetadata': {u'RequestId': u'request_id'}}}).encode('utf-8')
def test_list_available_solution_stacks(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.list_available_solution_stacks()
stack_details = api_response['ListAvailableSolutionStacksResponse']\
['ListAvailableSolutionStacksResult']\
['SolutionStackDetails']
solution_stacks = api_response['ListAvailableSolutionStacksResponse']\
['ListAvailableSolutionStacksResult']\
['SolutionStacks']
self.assertEqual(solution_stacks,
[u'32bit Amazon Linux running Tomcat 7',
u'32bit Amazon Linux running PHP 5.3'])
        # These are the parameters that are actually sent to the Elastic
        # Beanstalk service.
self.assert_request_parameters({
'Action': 'ListAvailableSolutionStacks',
'ContentType': 'JSON',
'Version': '2010-12-01',
})
class TestCreateApplicationVersion(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return json.dumps({
'CreateApplicationVersionResponse':
{u'CreateApplicationVersionResult':
{u'ApplicationVersion':
{u'ApplicationName': u'application1',
u'DateCreated': 1343067094.342,
u'DateUpdated': 1343067094.342,
u'Description': None,
u'SourceBundle': {u'S3Bucket': u'elasticbeanstalk-us-east-1',
u'S3Key': u'resources/elasticbeanstalk-sampleapp.war'},
u'VersionLabel': u'version1'}}}}).encode('utf-8')
def test_create_application_version(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_application_version(
'application1', 'version1', s3_bucket='mybucket', s3_key='mykey',
auto_create_application=True)
app_version = api_response['CreateApplicationVersionResponse']\
['CreateApplicationVersionResult']\
['ApplicationVersion']
self.assert_request_parameters({
'Action': 'CreateApplicationVersion',
'ContentType': 'JSON',
'Version': '2010-12-01',
'ApplicationName': 'application1',
'AutoCreateApplication': 'true',
'SourceBundle.S3Bucket': 'mybucket',
'SourceBundle.S3Key': 'mykey',
'VersionLabel': 'version1',
})
self.assertEqual(app_version['ApplicationName'], 'application1')
self.assertEqual(app_version['VersionLabel'], 'version1')
class TestCreateEnvironment(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return json.dumps({}).encode('utf-8')
def test_create_environment(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_environment(
'application1', 'environment1', 'version1',
'32bit Amazon Linux running Tomcat 7',
option_settings=[
('aws:autoscaling:launchconfiguration', 'Ec2KeyName',
'mykeypair'),
('aws:elasticbeanstalk:application:environment', 'ENVVAR',
'VALUE1')])
self.assert_request_parameters({
'Action': 'CreateEnvironment',
'ApplicationName': 'application1',
'EnvironmentName': 'environment1',
'TemplateName': '32bit Amazon Linux running Tomcat 7',
'ContentType': 'JSON',
'Version': '2010-12-01',
'VersionLabel': 'version1',
'OptionSettings.member.1.Namespace': 'aws:autoscaling:launchconfiguration',
'OptionSettings.member.1.OptionName': 'Ec2KeyName',
'OptionSettings.member.1.Value': 'mykeypair',
'OptionSettings.member.2.Namespace': 'aws:elasticbeanstalk:application:environment',
'OptionSettings.member.2.OptionName': 'ENVVAR',
'OptionSettings.member.2.Value': 'VALUE1',
})
def test_create_environment_with_tier(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_environment(
'application1', 'environment1', 'version1',
'32bit Amazon Linux running Tomcat 7',
option_settings=[
('aws:autoscaling:launchconfiguration', 'Ec2KeyName',
'mykeypair'),
('aws:elasticbeanstalk:application:environment', 'ENVVAR',
'VALUE1')],
tier_name='Worker', tier_type='SQS/HTTP', tier_version='1.0')
self.assert_request_parameters({
'Action': 'CreateEnvironment',
'ApplicationName': 'application1',
'EnvironmentName': 'environment1',
'TemplateName': '32bit Amazon Linux running Tomcat 7',
'ContentType': 'JSON',
'Version': '2010-12-01',
'VersionLabel': 'version1',
'OptionSettings.member.1.Namespace': 'aws:autoscaling:launchconfiguration',
'OptionSettings.member.1.OptionName': 'Ec2KeyName',
'OptionSettings.member.1.Value': 'mykeypair',
'OptionSettings.member.2.Namespace': 'aws:elasticbeanstalk:application:environment',
'OptionSettings.member.2.OptionName': 'ENVVAR',
'OptionSettings.member.2.Value': 'VALUE1',
'Tier.Name': 'Worker',
'Tier.Type': 'SQS/HTTP',
'Tier.Version': '1.0',
})
| mit | 7,795,050,611,015,226,000 | 45.785235 | 96 | 0.591163 | false |
nycholas/ask-undrgz | src/ask-undrgz/django/db/backends/postgresql/introspection.py | 308 | 3725 | from django.db.backends import BaseDatabaseIntrospection
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Maps type codes to Django Field types.
data_types_reverse = {
16: 'BooleanField',
20: 'BigIntegerField',
21: 'SmallIntegerField',
23: 'IntegerField',
25: 'TextField',
700: 'FloatField',
701: 'FloatField',
869: 'IPAddressField',
1043: 'CharField',
1082: 'DateField',
1083: 'TimeField',
1114: 'DateTimeField',
1184: 'DateTimeField',
1266: 'TimeField',
1700: 'DecimalField',
}
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
cursor.execute("""
SELECT c.relname
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'v', '')
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)""")
return [row[0] for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
return cursor.description
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
cursor.execute("""
SELECT con.conkey, con.confkey, c2.relname
FROM pg_constraint con, pg_class c1, pg_class c2
WHERE c1.oid = con.conrelid
AND c2.oid = con.confrelid
AND c1.relname = %s
AND con.contype = 'f'""", [table_name])
relations = {}
for row in cursor.fetchall():
try:
# row[0] and row[1] are like "{2}", so strip the curly braces.
relations[int(row[0][1:-1]) - 1] = (int(row[1][1:-1]) - 1, row[2])
except ValueError:
continue
return relations
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
# This query retrieves each index on the given table, including the
# first associated field name
cursor.execute("""
SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
WHERE c.oid = idx.indrelid
AND idx.indexrelid = c2.oid
AND attr.attrelid = c.oid
AND attr.attnum = idx.indkey[0]
AND c.relname = %s""", [table_name])
indexes = {}
for row in cursor.fetchall():
# row[1] (idx.indkey) is stored in the DB as an array. It comes out as
# a string of space-separated integers. This designates the field
# indexes (1-based) of the fields that have indexes on the table.
# Here, we skip any indexes across multiple fields.
if ' ' in row[1]:
continue
indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]}
return indexes
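# Usage sketch (illustrative; assumes a configured PostgreSQL connection):
#
#     from django.db import connection
#     cursor = connection.cursor()
#     tables = connection.introspection.get_table_list(cursor)
#     relations = connection.introspection.get_relations(cursor, tables[0])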
| bsd-3-clause | 1,543,683,538,963,734,300 | 41.329545 | 95 | 0.572617 | false |
scifiswapnil/Project-LoCatr | lib/python2.7/site-packages/django/contrib/postgres/forms/ranges.py | 132 | 3063 | from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django import forms
from django.core import exceptions
from django.forms.widgets import MultiWidget
from django.utils.translation import ugettext_lazy as _
__all__ = ['IntegerRangeField', 'FloatRangeField', 'DateTimeRangeField', 'DateRangeField']
class BaseRangeField(forms.MultiValueField):
default_error_messages = {
'invalid': _('Enter two valid values.'),
'bound_ordering': _('The start of the range must not exceed the end of the range.'),
}
def __init__(self, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = RangeWidget(self.base_field.widget)
if 'fields' not in kwargs:
kwargs['fields'] = [self.base_field(required=False), self.base_field(required=False)]
kwargs.setdefault('required', False)
kwargs.setdefault('require_all_fields', False)
super(BaseRangeField, self).__init__(**kwargs)
def prepare_value(self, value):
lower_base, upper_base = self.fields
if isinstance(value, self.range_type):
return [
lower_base.prepare_value(value.lower),
upper_base.prepare_value(value.upper),
]
if value is None:
return [
lower_base.prepare_value(None),
upper_base.prepare_value(None),
]
return value
def compress(self, values):
if not values:
return None
lower, upper = values
if lower is not None and upper is not None and lower > upper:
raise exceptions.ValidationError(
self.error_messages['bound_ordering'],
code='bound_ordering',
)
try:
range_value = self.range_type(lower, upper)
except TypeError:
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
)
else:
return range_value
class IntegerRangeField(BaseRangeField):
default_error_messages = {'invalid': _('Enter two whole numbers.')}
base_field = forms.IntegerField
range_type = NumericRange
class FloatRangeField(BaseRangeField):
default_error_messages = {'invalid': _('Enter two numbers.')}
base_field = forms.FloatField
range_type = NumericRange
class DateTimeRangeField(BaseRangeField):
default_error_messages = {'invalid': _('Enter two valid date/times.')}
base_field = forms.DateTimeField
range_type = DateTimeTZRange
class DateRangeField(BaseRangeField):
default_error_messages = {'invalid': _('Enter two valid dates.')}
base_field = forms.DateField
range_type = DateRange
class RangeWidget(MultiWidget):
def __init__(self, base_widget, attrs=None):
widgets = (base_widget, base_widget)
super(RangeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
return (value.lower, value.upper)
return (None, None)
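# Usage sketch (illustrative; the form and field names are hypothetical):
#
#     class EventForm(forms.Form):
#         seats = IntegerRangeField()
#
#     form = EventForm(data={'seats_0': '1', 'seats_1': '5'})
#     form.is_valid()             # -> True
#     form.cleaned_data['seats']  # -> NumericRange(1, 5)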
| mit | -2,232,849,063,356,993,500 | 32.293478 | 97 | 0.623898 | false |
xuanyuanking/spark | python/pyspark/pandas/data_type_ops/base.py | 5 | 13688 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numbers
from abc import ABCMeta
from itertools import chain
from typing import Any, Optional, Union
import numpy as np
import pandas as pd
from pandas.api.types import CategoricalDtype
from pyspark.sql import functions as F
from pyspark.sql.types import (
ArrayType,
BinaryType,
BooleanType,
DataType,
DateType,
DecimalType,
FractionalType,
IntegralType,
MapType,
NullType,
NumericType,
StringType,
StructType,
TimestampType,
UserDefinedType,
)
from pyspark.pandas._typing import Dtype, IndexOpsLike, SeriesOrIndex
from pyspark.pandas.spark import functions as SF
from pyspark.pandas.typedef import extension_dtypes
from pyspark.pandas.typedef.typehints import (
extension_dtypes_available,
extension_float_dtypes_available,
extension_object_dtypes_available,
)
if extension_dtypes_available:
from pandas import Int8Dtype, Int16Dtype, Int32Dtype, Int64Dtype
if extension_float_dtypes_available:
from pandas import Float32Dtype, Float64Dtype
if extension_object_dtypes_available:
from pandas import BooleanDtype, StringDtype
def is_valid_operand_for_numeric_arithmetic(operand: Any, *, allow_bool: bool = True) -> bool:
"""Check whether the `operand` is valid for arithmetic operations against numerics."""
from pyspark.pandas.base import IndexOpsMixin
if isinstance(operand, numbers.Number):
return not isinstance(operand, bool) or allow_bool
elif isinstance(operand, IndexOpsMixin):
if isinstance(operand.dtype, CategoricalDtype):
return False
else:
return isinstance(operand.spark.data_type, NumericType) or (
allow_bool and isinstance(operand.spark.data_type, BooleanType)
)
else:
return False
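# For example (illustrative): is_valid_operand_for_numeric_arithmetic(1.5) is
# True, while is_valid_operand_for_numeric_arithmetic(True, allow_bool=False)
# is False.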
def transform_boolean_operand_to_numeric(
operand: Any, spark_type: Optional[DataType] = None
) -> Any:
"""Transform boolean operand to numeric.
If the `operand` is:
- a boolean IndexOpsMixin, transform the `operand` to the `spark_type`.
- a boolean literal, transform to the int value.
Otherwise, return the operand as it is.
"""
from pyspark.pandas.base import IndexOpsMixin
if isinstance(operand, IndexOpsMixin) and isinstance(operand.spark.data_type, BooleanType):
assert spark_type, "spark_type must be provided if the operand is a boolean IndexOpsMixin"
return operand.spark.transform(lambda scol: scol.cast(spark_type))
elif isinstance(operand, bool):
return int(operand)
else:
return operand
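# For example (illustrative): transform_boolean_operand_to_numeric(True)
# returns 1, and a boolean Series/Index is cast to the given type, e.g.
# transform_boolean_operand_to_numeric(psser, spark_type=LongType()).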
def _as_categorical_type(
index_ops: IndexOpsLike, dtype: CategoricalDtype, spark_type: DataType
) -> IndexOpsLike:
"""Cast `index_ops` to categorical dtype, given `dtype` and `spark_type`."""
assert isinstance(dtype, CategoricalDtype)
if dtype.categories is None:
codes, uniques = index_ops.factorize()
return codes._with_new_scol(
codes.spark.column,
field=codes._internal.data_fields[0].copy(dtype=CategoricalDtype(categories=uniques)),
)
else:
categories = dtype.categories
if len(categories) == 0:
scol = SF.lit(-1)
else:
kvs = chain(
*[(SF.lit(category), SF.lit(code)) for code, category in enumerate(categories)]
)
map_scol = F.create_map(*kvs)
scol = F.coalesce(map_scol.getItem(index_ops.spark.column), SF.lit(-1))
return index_ops._with_new_scol(
scol.cast(spark_type).alias(index_ops._internal.data_fields[0].name),
field=index_ops._internal.data_fields[0].copy(
dtype=dtype, spark_type=spark_type, nullable=False
),
)
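# For example (illustrative): casting with CategoricalDtype(['a', 'b']) maps
# 'a' -> 0 and 'b' -> 1 via the literal map built above, while values outside
# the declared categories fall back to the code -1.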
def _as_bool_type(index_ops: IndexOpsLike, dtype: Union[str, type, Dtype]) -> IndexOpsLike:
"""Cast `index_ops` to BooleanType Spark type, given `dtype`."""
from pyspark.pandas.internal import InternalField
if isinstance(dtype, extension_dtypes):
scol = index_ops.spark.column.cast(BooleanType())
else:
scol = F.when(index_ops.spark.column.isNull(), SF.lit(False)).otherwise(
index_ops.spark.column.cast(BooleanType())
)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
def _as_string_type(
index_ops: IndexOpsLike, dtype: Union[str, type, Dtype], *, null_str: str = str(None)
) -> IndexOpsLike:
"""Cast `index_ops` to StringType Spark type, given `dtype` and `null_str`,
representing null Spark column.
"""
from pyspark.pandas.internal import InternalField
if isinstance(dtype, extension_dtypes):
scol = index_ops.spark.column.cast(StringType())
else:
casted = index_ops.spark.column.cast(StringType())
scol = F.when(index_ops.spark.column.isNull(), null_str).otherwise(casted)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
def _as_other_type(
index_ops: IndexOpsLike, dtype: Union[str, type, Dtype], spark_type: DataType
) -> IndexOpsLike:
"""Cast `index_ops` to a `dtype` (`spark_type`) that needs no pre-processing.
Destination types that need pre-processing: CategoricalDtype, BooleanType, and StringType.
"""
from pyspark.pandas.internal import InternalField
need_pre_process = (
isinstance(dtype, CategoricalDtype)
or isinstance(spark_type, BooleanType)
or isinstance(spark_type, StringType)
)
assert not need_pre_process, "Pre-processing is needed before the type casting."
scol = index_ops.spark.column.cast(spark_type)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
class DataTypeOps(object, metaclass=ABCMeta):
"""The base class for binary operations of pandas-on-Spark objects (of different data types)."""
def __new__(cls, dtype: Dtype, spark_type: DataType) -> "DataTypeOps":
from pyspark.pandas.data_type_ops.binary_ops import BinaryOps
from pyspark.pandas.data_type_ops.boolean_ops import BooleanOps, BooleanExtensionOps
from pyspark.pandas.data_type_ops.categorical_ops import CategoricalOps
from pyspark.pandas.data_type_ops.complex_ops import ArrayOps, MapOps, StructOps
from pyspark.pandas.data_type_ops.date_ops import DateOps
from pyspark.pandas.data_type_ops.datetime_ops import DatetimeOps
from pyspark.pandas.data_type_ops.null_ops import NullOps
from pyspark.pandas.data_type_ops.num_ops import (
DecimalOps,
FractionalExtensionOps,
FractionalOps,
IntegralExtensionOps,
IntegralOps,
)
from pyspark.pandas.data_type_ops.string_ops import StringOps, StringExtensionOps
from pyspark.pandas.data_type_ops.udt_ops import UDTOps
if isinstance(dtype, CategoricalDtype):
return object.__new__(CategoricalOps)
elif isinstance(spark_type, DecimalType):
return object.__new__(DecimalOps)
elif isinstance(spark_type, FractionalType):
if extension_float_dtypes_available and type(dtype) in [Float32Dtype, Float64Dtype]:
return object.__new__(FractionalExtensionOps)
else:
return object.__new__(FractionalOps)
elif isinstance(spark_type, IntegralType):
if extension_dtypes_available and type(dtype) in [
Int8Dtype,
Int16Dtype,
Int32Dtype,
Int64Dtype,
]:
return object.__new__(IntegralExtensionOps)
else:
return object.__new__(IntegralOps)
elif isinstance(spark_type, StringType):
if extension_object_dtypes_available and isinstance(dtype, StringDtype):
return object.__new__(StringExtensionOps)
else:
return object.__new__(StringOps)
elif isinstance(spark_type, BooleanType):
if extension_object_dtypes_available and isinstance(dtype, BooleanDtype):
return object.__new__(BooleanExtensionOps)
else:
return object.__new__(BooleanOps)
elif isinstance(spark_type, TimestampType):
return object.__new__(DatetimeOps)
elif isinstance(spark_type, DateType):
return object.__new__(DateOps)
elif isinstance(spark_type, BinaryType):
return object.__new__(BinaryOps)
elif isinstance(spark_type, ArrayType):
return object.__new__(ArrayOps)
elif isinstance(spark_type, MapType):
return object.__new__(MapOps)
elif isinstance(spark_type, StructType):
return object.__new__(StructOps)
elif isinstance(spark_type, NullType):
return object.__new__(NullOps)
elif isinstance(spark_type, UserDefinedType):
return object.__new__(UDTOps)
else:
raise TypeError("Type %s was not understood." % dtype)
def __init__(self, dtype: Dtype, spark_type: DataType):
self.dtype = dtype
self.spark_type = spark_type
@property
def pretty_name(self) -> str:
raise NotImplementedError()
def add(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Addition can not be applied to %s." % self.pretty_name)
def sub(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Subtraction can not be applied to %s." % self.pretty_name)
def mul(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Multiplication can not be applied to %s." % self.pretty_name)
def truediv(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("True division can not be applied to %s." % self.pretty_name)
def floordiv(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Floor division can not be applied to %s." % self.pretty_name)
def mod(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Modulo can not be applied to %s." % self.pretty_name)
def pow(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name)
def radd(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Addition can not be applied to %s." % self.pretty_name)
def rsub(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Subtraction can not be applied to %s." % self.pretty_name)
def rmul(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Multiplication can not be applied to %s." % self.pretty_name)
def rtruediv(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("True division can not be applied to %s." % self.pretty_name)
def rfloordiv(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Floor division can not be applied to %s." % self.pretty_name)
def rmod(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Modulo can not be applied to %s." % self.pretty_name)
def rpow(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name)
def __and__(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Bitwise and can not be applied to %s." % self.pretty_name)
def __or__(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
raise TypeError("Bitwise or can not be applied to %s." % self.pretty_name)
def rand(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
return left.__and__(right)
def ror(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
return left.__or__(right)
def restore(self, col: pd.Series) -> pd.Series:
"""Restore column when to_pandas."""
return col
def prepare(self, col: pd.Series) -> pd.Series:
"""Prepare column when from_pandas."""
return col.replace({np.nan: None})
def isnull(self, index_ops: IndexOpsLike) -> IndexOpsLike:
return index_ops._with_new_scol(
index_ops.spark.column.isNull(),
field=index_ops._internal.data_fields[0].copy(
dtype=np.dtype("bool"), spark_type=BooleanType(), nullable=False
),
)
def astype(self, index_ops: IndexOpsLike, dtype: Union[str, type, Dtype]) -> IndexOpsLike:
raise TypeError("astype can not be applied to %s." % self.pretty_name)
| apache-2.0 | 5,083,086,210,169,483,000 | 39.497041 | 100 | 0.664158 | false |
vincenzo/LIBSVM | python/svm.py | 5 | 7768 | #!/usr/bin/env python
from ctypes import *
from ctypes.util import find_library
import sys
# On Unix, find_library() expects the name without the 'lib' prefix, so try both spellings.
if find_library('svm'):
libsvm = CDLL(find_library('svm'))
elif find_library('libsvm'):
libsvm = CDLL(find_library('libsvm'))
else:
if sys.platform == 'win32':
libsvm = CDLL('../windows/libsvm.dll')
else:
libsvm = CDLL('../libsvm.so.2')
# Construct constants
SVM_TYPE = ['C_SVC', 'NU_SVC', 'ONE_CLASS', 'EPSILON_SVR', 'NU_SVR' ]
KERNEL_TYPE = ['LINEAR', 'POLY', 'RBF', 'SIGMOID', 'PRECOMPUTED']
for i, s in enumerate(SVM_TYPE): exec("%s = %d" % (s , i))
for i, s in enumerate(KERNEL_TYPE): exec("%s = %d" % (s , i))
PRINT_STRING_FUN = CFUNCTYPE(None, c_char_p)
def print_null(s):
return
def genFields(names, types):
return list(zip(names, types))
def fillprototype(f, restype, argtypes):
f.restype = restype
f.argtypes = argtypes
class svm_node(Structure):
_names = ["index", "value"]
_types = [c_int, c_double]
_fields_ = genFields(_names, _types)
def gen_svm_nodearray(xi, feature_max=None, issparse=None):
if isinstance(xi, dict):
index_range = xi.keys()
elif isinstance(xi, (list, tuple)):
index_range = range(len(xi))
else:
raise TypeError('xi should be a dictionary, list or tuple')
if feature_max:
assert(isinstance(feature_max, int))
index_range = filter(lambda j: j <= feature_max, index_range)
if issparse:
index_range = filter(lambda j:xi[j] != 0, index_range)
index_range = sorted(index_range)
ret = (svm_node * (len(index_range)+1))()
ret[-1].index = -1
for idx, j in enumerate(index_range):
ret[idx].index = j
ret[idx].value = xi[j]
max_idx = 0
if index_range:
max_idx = index_range[-1]
return ret, max_idx
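# For example (illustrative): gen_svm_nodearray({1: 1.0, 3: -1.0}) returns a
# (svm_node * 3) ctypes array terminated by index -1, together with the
# maximum feature index 3.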
class svm_problem(Structure):
_names = ["l", "y", "x"]
_types = [c_int, POINTER(c_double), POINTER(POINTER(svm_node))]
_fields_ = genFields(_names, _types)
def __init__(self, y, x):
if len(y) != len(x):
raise ValueError("len(y) != len(x)")
self.l = l = len(y)
max_idx = 0
x_space = self.x_space = []
for i, xi in enumerate(x):
tmp_xi, tmp_idx = gen_svm_nodearray(xi)
x_space += [tmp_xi]
max_idx = max(max_idx, tmp_idx)
self.n = max_idx
self.y = (c_double * l)()
for i, yi in enumerate(y): self.y[i] = yi
self.x = (POINTER(svm_node) * l)()
for i, xi in enumerate(self.x_space): self.x[i] = xi
class svm_parameter(Structure):
_names = ["svm_type", "kernel_type", "degree", "gamma", "coef0",
"cache_size", "eps", "C", "nr_weight", "weight_label", "weight",
"nu", "p", "shrinking", "probability"]
_types = [c_int, c_int, c_int, c_double, c_double,
c_double, c_double, c_double, c_int, POINTER(c_int), POINTER(c_double),
c_double, c_double, c_int, c_int]
_fields_ = genFields(_names, _types)
def __init__(self, options = None):
if options == None:
options = ''
self.parse_options(options)
def show(self):
        attrs = svm_parameter._names + list(self.__dict__.keys())
values = map(lambda attr: getattr(self, attr), attrs)
for attr, val in zip(attrs, values):
print(' %s: %s' % (attr, val))
def set_to_default_values(self):
self.svm_type = C_SVC;
self.kernel_type = RBF
self.degree = 3
self.gamma = 0
self.coef0 = 0
self.nu = 0.5
self.cache_size = 100
self.C = 1
self.eps = 0.001
self.p = 0.1
self.shrinking = 1
self.probability = 0
self.nr_weight = 0
self.weight_label = (c_int*0)()
self.weight = (c_double*0)()
self.cross_validation = False
self.nr_fold = 0
self.print_func = None
def parse_options(self, options):
argv = options.split()
self.set_to_default_values()
self.print_func = cast(None, PRINT_STRING_FUN)
weight_label = []
weight = []
i = 0
while i < len(argv):
if argv[i] == "-s":
i = i + 1
self.svm_type = int(argv[i])
elif argv[i] == "-t":
i = i + 1
self.kernel_type = int(argv[i])
elif argv[i] == "-d":
i = i + 1
self.degree = int(argv[i])
elif argv[i] == "-g":
i = i + 1
self.gamma = float(argv[i])
elif argv[i] == "-r":
i = i + 1
self.coef0 = float(argv[i])
elif argv[i] == "-n":
i = i + 1
self.nu = float(argv[i])
elif argv[i] == "-m":
i = i + 1
self.cache_size = float(argv[i])
elif argv[i] == "-c":
i = i + 1
self.C = float(argv[i])
elif argv[i] == "-e":
i = i + 1
self.eps = float(argv[i])
elif argv[i] == "-p":
i = i + 1
self.p = float(argv[i])
elif argv[i] == "-h":
i = i + 1
self.shrinking = int(argv[i])
elif argv[i] == "-b":
i = i + 1
self.probability = int(argv[i])
elif argv[i] == "-q":
self.print_func = PRINT_STRING_FUN(print_null)
elif argv[i] == "-v":
i = i + 1
self.cross_validation = 1
self.nr_fold = int(argv[i])
if self.nr_fold < 2:
raise ValueError("n-fold cross validation: n must >= 2")
elif argv[i].startswith("-w"):
i = i + 1
self.nr_weight += 1
nr_weight = self.nr_weight
weight_label += [int(argv[i-1][2:])]
weight += [float(argv[i])]
else:
raise ValueError("Wrong options")
i += 1
libsvm.svm_set_print_string_function(self.print_func)
self.weight_label = (c_int*self.nr_weight)()
self.weight = (c_double*self.nr_weight)()
for i in range(self.nr_weight):
self.weight[i] = weight[i]
self.weight_label[i] = weight_label[i]
class svm_model(Structure):
def __init__(self):
self.__createfrom__ = 'python'
def __del__(self):
# free memory created by C to avoid memory leak
if hasattr(self, '__createfrom__') and self.__createfrom__ == 'C':
libsvm.svm_free_and_destroy_model(pointer(self))
def get_svm_type(self):
return libsvm.svm_get_svm_type(self)
def get_nr_class(self):
return libsvm.svm_get_nr_class(self)
def get_svr_probability(self):
return libsvm.svm_get_svr_probability(self)
def get_labels(self):
nr_class = self.get_nr_class()
labels = (c_int * nr_class)()
libsvm.svm_get_labels(self, labels)
return labels[:nr_class]
def is_probability_model(self):
return (libsvm.svm_check_probability_model(self) == 1)
def toPyModel(model_ptr):
"""
toPyModel(model_ptr) -> svm_model
Convert a ctypes POINTER(svm_model) to a Python svm_model
"""
if bool(model_ptr) == False:
raise ValueError("Null pointer")
m = model_ptr.contents
m.__createfrom__ = 'C'
return m
fillprototype(libsvm.svm_train, POINTER(svm_model), [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_cross_validation, None, [POINTER(svm_problem), POINTER(svm_parameter), c_int, POINTER(c_double)])
fillprototype(libsvm.svm_save_model, c_int, [c_char_p, POINTER(svm_model)])
fillprototype(libsvm.svm_load_model, POINTER(svm_model), [c_char_p])
fillprototype(libsvm.svm_get_svm_type, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_nr_class, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_labels, None, [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_svr_probability, c_double, [POINTER(svm_model)])
fillprototype(libsvm.svm_predict_values, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_predict, c_double, [POINTER(svm_model), POINTER(svm_node)])
fillprototype(libsvm.svm_predict_probability, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_free_model_content, None, [POINTER(svm_model)])
fillprototype(libsvm.svm_free_and_destroy_model, None, [POINTER(POINTER(svm_model))])
fillprototype(libsvm.svm_destroy_param, None, [POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_parameter, c_char_p, [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_probability_model, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_set_print_string_function, None, [PRINT_STRING_FUN])
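# Usage sketch with toy data (illustrative; the labels and features below are
# made up, and this assumes the shared library was located above):
if __name__ == '__main__':
    prob = svm_problem([1, -1], [{1: 1.0, 3: 1.0}, {1: -1.0, 3: -1.0}])
    param = svm_parameter('-s 0 -t 2 -c 1 -q')
    model = toPyModel(libsvm.svm_train(prob, param))
    x, _ = gen_svm_nodearray({1: 1.0, 3: 1.0})
    print(libsvm.svm_predict(model, x))  # predicted label for the first point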
| bsd-3-clause | 1,009,872,799,665,381,200 | 28.992278 | 122 | 0.640062 | false |
harddevelop/jukebox | core/cipher.py | 1 | 1196 | from core.Cipher import DES
import hashlib
import base64
import re
import os
'''
PBEWithMD5AndDES port: Java-style password-based encryption using
MD5-based key derivation (PKCS#5 v1.5 PBKDF1) and DES in CBC mode.
'''
class PBEWithMD5AndDES():
@staticmethod
def get_derived_key(password, salt, count):
key = password + salt
for i in range(count):
m = hashlib.md5(key)
key = m.digest()
return (key[:8], key[8:])
@staticmethod
def decrypt(msg, password):
msg_bytes = base64.b64decode(msg)
salt = msg_bytes[:8]
enc_text = msg_bytes[8:]
(dk, iv) = PBEWithMD5AndDES.get_derived_key(password, salt, 1000)
crypter = DES.new(dk, DES.MODE_CBC, iv)
text = crypter.decrypt(enc_text)
        # strip PKCS#5 padding bytes (\x01-\x08); note the regex removes them
        # anywhere in the plaintext, not only at the end
return re.sub(r'[\x01-\x08]','',text)
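    # Round-trip sketch (illustrative values):
    #     token = PBEWithMD5AndDES.encrypt('secret text', 'password')
    #     PBEWithMD5AndDES.decrypt(token, 'password')  # -> 'secret text'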
@staticmethod
def encrypt(msg, password):
salt = os.urandom(8)
pad_num = 8 - (len(msg) % 8)
for i in range(pad_num):
msg += chr(pad_num)
(dk, iv) = PBEWithMD5AndDES.get_derived_key(password, salt, 1000)
crypter = DES.new(dk, DES.MODE_CBC, iv)
enc_text = crypter.encrypt(msg)
        return base64.b64encode(salt + enc_text)
| gpl-2.0 | 8,409,232,383,664,698,000 | 27.5 | 73 | 0.591973 | false |
danieljaouen/ansible | lib/ansible/modules/cloud/cloudstack/cs_staticnat.py | 21 | 6799 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_staticnat
short_description: Manages static NATs on Apache CloudStack based clouds.
description:
- Create, update and remove static NATs.
version_added: "2.0"
author: "René Moser (@resmo)"
options:
ip_address:
description:
- Public IP address the static NAT is assigned to.
required: true
vm:
description:
- Name of virtual machine which we make the static NAT for.
- Required if I(state=present).
vm_guest_ip:
description:
- VM guest NIC secondary IP address for the static NAT.
network:
description:
- Network the IP address is related to.
version_added: "2.2"
vpc:
description:
- VPC the network related to.
version_added: "2.3"
state:
description:
- State of the static NAT.
default: present
choices: [ present, absent ]
domain:
description:
- Domain the static NAT is related to.
account:
description:
- Account the static NAT is related to.
project:
description:
- Name of the project the static NAT is related to.
zone:
description:
- Name of the zone in which the virtual machine is in.
- If not set, default zone is used.
poll_async:
description:
- Poll async jobs until job has finished.
type: bool
default: yes
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Create a static NAT for IP 1.2.3.4 to web01
local_action:
module: cs_staticnat
ip_address: 1.2.3.4
vm: web01
- name: Remove a static NAT
local_action:
module: cs_staticnat
ip_address: 1.2.3.4
state: absent
'''
RETURN = '''
---
id:
description: UUID of the ip_address.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
ip_address:
description: Public IP address.
returned: success
type: string
sample: 1.2.3.4
vm_name:
description: Name of the virtual machine.
returned: success
type: string
sample: web-01
vm_display_name:
description: Display name of the virtual machine.
returned: success
type: string
sample: web-01
vm_guest_ip:
description: IP of the virtual machine.
returned: success
type: string
sample: 10.101.65.152
zone:
description: Name of zone the static NAT is related to.
returned: success
type: string
sample: ch-gva-2
project:
description: Name of project the static NAT is related to.
returned: success
type: string
sample: Production
account:
description: Account the static NAT is related to.
returned: success
type: string
sample: example account
domain:
description: Domain the static NAT is related to.
returned: success
type: string
sample: example domain
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackStaticNat(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackStaticNat, self).__init__(module)
self.returns = {
'virtualmachinedisplayname': 'vm_display_name',
'virtualmachinename': 'vm_name',
'ipaddress': 'ip_address',
'vmipaddress': 'vm_guest_ip',
}
def create_static_nat(self, ip_address):
self.result['changed'] = True
args = {
'virtualmachineid': self.get_vm(key='id'),
'ipaddressid': ip_address['id'],
'vmguestip': self.get_vm_guest_ip(),
'networkid': self.get_network(key='id')
}
if not self.module.check_mode:
self.query_api('enableStaticNat', **args)
# reset ip address and query new values
self.ip_address = None
ip_address = self.get_ip_address()
return ip_address
def update_static_nat(self, ip_address):
args = {
'virtualmachineid': self.get_vm(key='id'),
'ipaddressid': ip_address['id'],
'vmguestip': self.get_vm_guest_ip(),
'networkid': self.get_network(key='id')
}
# make an alias, so we can use has_changed()
ip_address['vmguestip'] = ip_address['vmipaddress']
if self.has_changed(args, ip_address, ['vmguestip', 'virtualmachineid']):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('disableStaticNat', ipaddressid=ip_address['id'])
self.poll_job(res, 'staticnat')
self.query_api('enableStaticNat', **args)
# reset ip address and query new values
self.ip_address = None
ip_address = self.get_ip_address()
return ip_address
def present_static_nat(self):
ip_address = self.get_ip_address()
if not ip_address['isstaticnat']:
ip_address = self.create_static_nat(ip_address)
else:
ip_address = self.update_static_nat(ip_address)
return ip_address
def absent_static_nat(self):
ip_address = self.get_ip_address()
if ip_address['isstaticnat']:
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('disableStaticNat', ipaddressid=ip_address['id'])
poll_async = self.module.params.get('poll_async')
if poll_async:
self.poll_job(res, 'staticnat')
return ip_address
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
ip_address=dict(required=True),
vm=dict(),
vm_guest_ip=dict(),
network=dict(),
vpc=dict(),
state=dict(choices=['present', 'absent'], default='present'),
zone=dict(),
domain=dict(),
account=dict(),
project=dict(),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_static_nat = AnsibleCloudStackStaticNat(module)
state = module.params.get('state')
if state in ['absent']:
ip_address = acs_static_nat.absent_static_nat()
else:
ip_address = acs_static_nat.present_static_nat()
result = acs_static_nat.get_result(ip_address)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -6,219,706,531,293,043,000 | 26.971193 | 92 | 0.61792 | false |
geerlingguy/ansible | lib/ansible/modules/fetch.py | 7 | 3790 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# This is a virtual module that is entirely implemented as an action plugin and runs on the controller
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: fetch
short_description: Fetch files from remote nodes
description:
- This module works like M(copy), but in reverse.
- It is used for fetching files from remote machines and storing them locally in a file tree, organized by hostname.
- Files that already exist at I(dest) will be overwritten if they are different than the I(src).
- This module is also supported for Windows targets.
version_added: '0.2'
options:
src:
description:
- The file on the remote system to fetch.
- This I(must) be a file, not a directory.
- Recursive fetching may be supported in a later release.
required: yes
dest:
description:
- A directory to save the file into.
- For example, if the I(dest) directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into C(/backup/host.example.com/etc/profile).
The host name is based on the inventory name.
required: yes
fail_on_missing:
version_added: '1.1'
description:
- When set to C(yes), the task will fail if the remote file cannot be read for any reason.
- Prior to Ansible 2.5, setting this would only fail if the source file was missing.
- The default was changed to C(yes) in Ansible 2.5.
type: bool
default: yes
validate_checksum:
version_added: '1.4'
description:
- Verify that the source and destination checksums match after the files are fetched.
type: bool
default: yes
flat:
version_added: '1.2'
description:
- Allows you to override the default behavior of appending hostname/path/to/file to the destination.
- If C(dest) ends with '/', it will use the basename of the source file, similar to the copy module.
- This can be useful if working with a single host, or if retrieving files that are uniquely named per host.
- If using multiple hosts with the same filename, the file will be overwritten for each host.
type: bool
default: no
notes:
- When running fetch with C(become), the M(slurp) module will also be
used to fetch the contents of the file for determining the remote
checksum. This effectively doubles the transfer size, and
depending on the file size can consume all available memory on the
remote or local hosts causing a C(MemoryError). Due to this it is
advisable to run this module without C(become) whenever possible.
- Prior to Ansible 2.5 this module would not fail if reading the remote
file was impossible unless C(fail_on_missing) was set.
- In Ansible 2.5 or later, playbook authors are encouraged to use
C(fail_when) or C(ignore_errors) to get this ability. They may
also explicitly set C(fail_on_missing) to C(no) to get the
non-failing behaviour.
- This module is also supported for Windows targets.
seealso:
- module: copy
- module: slurp
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile
fetch:
src: /tmp/somefile
dest: /tmp/fetched
- name: Specifying a path directly
fetch:
src: /tmp/somefile
dest: /tmp/prefix-{{ inventory_hostname }}
flat: yes
- name: Specifying a destination path
fetch:
src: /tmp/uniquefile
dest: /tmp/special/
flat: yes
- name: Storing in a path relative to the playbook
fetch:
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''
| gpl-3.0 | 4,189,154,183,729,560,000 | 35.095238 | 116 | 0.71372 | false |
willharris/django | tests/sites_tests/tests.py | 8 | 8628 | from __future__ import unicode_literals
from django.apps import apps
from django.conf import settings
from django.contrib.sites import models
from django.contrib.sites.management import create_default_site
from django.contrib.sites.middleware import CurrentSiteMiddleware
from django.contrib.sites.models import Site, clear_site_cache
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.signals import post_migrate
from django.http import HttpRequest
from django.test import TestCase, modify_settings, override_settings
from django.test.utils import captured_stdout
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class SitesFrameworkTests(TestCase):
multi_db = True
def setUp(self):
self.site = Site(
id=settings.SITE_ID,
domain="example.com",
name="example.com",
)
self.site.save()
def test_site_manager(self):
# Make sure that get_current() does not return a deleted Site object.
s = Site.objects.get_current()
self.assertIsInstance(s, Site)
s.delete()
self.assertRaises(ObjectDoesNotExist, Site.objects.get_current)
def test_site_cache(self):
# After updating a Site object (e.g. via the admin), we shouldn't return a
# bogus value from the SITE_CACHE.
site = Site.objects.get_current()
self.assertEqual("example.com", site.name)
s2 = Site.objects.get(id=settings.SITE_ID)
s2.name = "Example site"
s2.save()
site = Site.objects.get_current()
self.assertEqual("Example site", site.name)
def test_delete_all_sites_clears_cache(self):
# When all site objects are deleted the cache should also
# be cleared and get_current() should raise a DoesNotExist.
self.assertIsInstance(Site.objects.get_current(), Site)
Site.objects.all().delete()
self.assertRaises(Site.DoesNotExist, Site.objects.get_current)
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_get_current_site(self):
# Test that the correct Site object is returned
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
site = get_current_site(request)
self.assertIsInstance(site, Site)
self.assertEqual(site.id, settings.SITE_ID)
# Test that an exception is raised if the sites framework is installed
# but there is no matching Site
site.delete()
self.assertRaises(ObjectDoesNotExist, get_current_site, request)
# A RequestSite is returned if the sites framework is not installed
with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
site = get_current_site(request)
self.assertIsInstance(site, RequestSite)
self.assertEqual(site.name, "example.com")
@override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com'])
def test_get_current_site_no_site_id(self):
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
del settings.SITE_ID
site = get_current_site(request)
self.assertEqual(site.name, "example.com")
def test_domain_name_with_whitespaces(self):
# Regression for #17320
        # Domain names are not allowed to contain whitespace characters
site = Site(name="test name", domain="test test")
self.assertRaises(ValidationError, site.full_clean)
site.domain = "test\ttest"
self.assertRaises(ValidationError, site.full_clean)
site.domain = "test\ntest"
self.assertRaises(ValidationError, site.full_clean)
def test_clear_site_cache(self):
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
self.assertEqual(models.SITE_CACHE, {})
get_current_site(request)
expected_cache = {self.site.id: self.site}
self.assertEqual(models.SITE_CACHE, expected_cache)
with self.settings(SITE_ID=''):
get_current_site(request)
expected_cache.update({self.site.domain: self.site})
self.assertEqual(models.SITE_CACHE, expected_cache)
clear_site_cache(Site, instance=self.site, using='default')
self.assertEqual(models.SITE_CACHE, {})
@override_settings(SITE_ID='')
def test_clear_site_cache_domain(self):
site = Site.objects.create(name='example2.com', domain='example2.com')
request = HttpRequest()
request.META = {
"SERVER_NAME": "example2.com",
"SERVER_PORT": "80",
}
get_current_site(request) # prime the models.SITE_CACHE
expected_cache = {site.domain: site}
self.assertEqual(models.SITE_CACHE, expected_cache)
# Site exists in 'default' database so using='other' shouldn't clear.
clear_site_cache(Site, instance=site, using='other')
self.assertEqual(models.SITE_CACHE, expected_cache)
# using='default' should clear.
clear_site_cache(Site, instance=site, using='default')
self.assertEqual(models.SITE_CACHE, {})
class JustOtherRouter(object):
    """Minimal database router that only allows migrations on 'other'."""

    def allow_migrate(self, db, app_label, **hints):
        return db == 'other'
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class CreateDefaultSiteTests(TestCase):
multi_db = True
def setUp(self):
self.app_config = apps.get_app_config('sites')
# Delete the site created as part of the default migration process.
Site.objects.all().delete()
def test_basic(self):
"""
#15346, #15573 - create_default_site() creates an example site only if
none exist.
"""
with captured_stdout() as stdout:
create_default_site(self.app_config)
self.assertEqual(Site.objects.count(), 1)
self.assertIn("Creating example.com", stdout.getvalue())
with captured_stdout() as stdout:
create_default_site(self.app_config)
self.assertEqual(Site.objects.count(), 1)
self.assertEqual("", stdout.getvalue())
@override_settings(DATABASE_ROUTERS=[JustOtherRouter()])
def test_multi_db_with_router(self):
"""
#16353, #16828 - The default site creation should respect db routing.
"""
create_default_site(self.app_config, using='default', verbosity=0)
create_default_site(self.app_config, using='other', verbosity=0)
self.assertFalse(Site.objects.using('default').exists())
self.assertTrue(Site.objects.using('other').exists())
def test_multi_db(self):
create_default_site(self.app_config, using='default', verbosity=0)
create_default_site(self.app_config, using='other', verbosity=0)
self.assertTrue(Site.objects.using('default').exists())
self.assertTrue(Site.objects.using('other').exists())
def test_save_another(self):
"""
#17415 - Another site can be created right after the default one.
On some backends the sequence needs to be reset after saving with an
        explicit ID. Check that there isn't a sequence collision by saving
another site. This test is only meaningful with databases that use
sequences for automatic primary keys such as PostgreSQL and Oracle.
"""
create_default_site(self.app_config, verbosity=0)
Site(domain='example2.com', name='example2.com').save()
def test_signal(self):
"""
#23641 - Sending the ``post_migrate`` signal triggers creation of the
default site.
"""
post_migrate.send(sender=self.app_config, app_config=self.app_config, verbosity=0)
self.assertTrue(Site.objects.exists())
@override_settings(SITE_ID=35696)
def test_custom_site_id(self):
"""
#23945 - The configured ``SITE_ID`` should be respected.
"""
create_default_site(self.app_config, verbosity=0)
self.assertEqual(Site.objects.get().pk, 35696)
class MiddlewareTest(TestCase):
def test_request(self):
""" Makes sure that the request has correct `site` attribute. """
middleware = CurrentSiteMiddleware()
request = HttpRequest()
middleware.process_request(request)
self.assertEqual(request.site.id, settings.SITE_ID)
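        # For reference (an illustrative addition, not part of the original
        # test): in a real project this middleware is enabled by adding
        # 'django.contrib.sites.middleware.CurrentSiteMiddleware' to the
        # middleware settings (MIDDLEWARE_CLASSES in this Django era), after
        # which every request carries a `request.site` attribute.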
| bsd-3-clause | -9,042,026,056,493,253,000 | 38.218182 | 90 | 0.65044 | false |
weblyzard/ewrt | tests/ws/wikidata/test_bundle_wikipedia_requests.py | 1 | 15715 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on October 04, 2018
@author: jakob <[email protected]>
'''
from __future__ import print_function
from builtins import next
from builtins import str
from builtins import range
import datetime
import mock
import pytest
from eWRT.ws.wikidata.bundle_wikipedia_requests import collect_multiple_from_wikipedia, \
wikipedia_request_dispatcher, batch_enrich_from_wikipedia
GW_snapshot_wikidata_result = {
u'https://www.wikidata.org/wiki/Q23': {u'frwiki': u'George Washington',
u'position held': {
'url': 'https://www.wikidata.org/wiki/Property:P39',
'values': [{
'url': u'https://www.wikidata.org/wiki/Q11696',
'temporal_attributes': {
'end date': u'+1797-03-04T00:00:00Z',
'start date': u'+1789-04-30T00:00:00Z'},
'labels': {
'de': u'Pr\xe4sident der Vereinigten Staaten',
'en': u'President of the United States',
'fr': u'pr\xe9sident des \xc9tats-Unis',
'es': u'presidente de Estados Unidos'},
'claim_id': u'q23$B6E5D112-C27E-4E3F-BB65-CB12B9364092'},
{
'url': u'https://www.wikidata.org/wiki/Q1115127',
'temporal_attributes': {
'end date': u'+1799-12-14T00:00:00Z',
'start date': u'+1798-07-13T00:00:00Z'},
'labels': {
'de': u'Commanding General of the United States Army',
'en': u'Commanding General of the United States Army',
'fr': u'Commanding General of the United States Army',
'es': u'comandante general del Ej\xe9rcito de los Estados Unidos'},
'claim_id': u'Q23$6A44E261-3592-4928-979B-0BF1CAB2D39C'},
{
'url': u'https://www.wikidata.org/wiki/Q1115127',
'temporal_attributes': {
'end date': u'+1788-12-23T00:00:00Z',
'start date': u'+1775-06-15T00:00:00Z'},
'labels': {
'de': u'Commanding General of the United States Army',
'en': u'Commanding General of the United States Army',
'fr': u'Commanding General of the United States Army',
'es': u'comandante general del Ej\xe9rcito de los Estados Unidos'},
'claim_id': u'Q23$2c113ca2-4177-4a24-eb0c-6c284ff03416'}]},
'wikidata_timestamp': '2018-10-03T00:05:30Z',
'url': u'https://www.wikidata.org/wiki/Q23',
u'date of birth': {
'url': 'https://www.wikidata.org/wiki/Property:P569',
'values': [{
'claim_id': u'Q23$3BF0223A-D656-435B-9FD1-32E0B8F54A69',
'value': u'+1732-02-22T00:00:00Z'}]},
u'dewiki': u'George Washington',
u'eswiki': u'George Washington',
'labels': {
'de': u'George Washington',
'en': u'George Washington',
'fr': u'George Washington',
'es': u'George Washington'},
u'place of birth': {
'url': 'https://www.wikidata.org/wiki/Property:P19',
'values': [{
'url': u'https://www.wikidata.org/wiki/Q495645',
'labels': {
'de': u'Westmoreland County',
'en': u'Westmoreland County',
'fr': u'comt\xe9 de Westmoreland',
'es': u'Condado de Westmoreland'},
'claim_id': u'Q23$ca56ecac-bad6-4d4c-ad29-36a26244955a'}]},
u'enwiki': u'George Washington',
'descriptions': {
'de': u'erster Pr\xe4sident der Vereinigten Staaten von Amerika',
'en': u'first President of the United States',
'fr': u"premier pr\xe9sident des \xc9tats-Unis d'Am\xe9rique",
'es': u'primer presidente de los Estados Unidos de Am\xe9rica'},
'wikidata_id': u'Q23', 'country': {
'url': 'https://www.wikidata.org/wiki/Property:P17', 'values': [
{'url': u'https://www.wikidata.org/wiki/Q30',
'labels': {'de': u'Vereinigte Staaten',
'en': u'United States of America',
'fr': u'\xc9tats-Unis', 'es': u'Estados Unidos'},
'claim_id': u'Q23@q495645$A10AFE59-9C11-40BC-87A5-567221D430AA'}]},
'aliases': {'de': [
u'Pr\xe4sident Washington',
u'G. Washington'],
'en': [u'Washington',
u'President Washington',
u'G. Washington',
u'Father of the United States']}}}
GW_snapshot_wikipedia_result = {
'url': u'https://en.wikipedia.org/wiki/George_Washington',
'timestamp': u'2018-10-04T05:06:49Z',
'title': u'George Washington',
'language': 'en',
'summary': u'George Washington (February 22, 1732 \u2013 December 14, 1799) was '
u'one of the Founding Fathers of the United States and served as '
u'the nation\u2019s first President (1789\u20131797). In the '
u'American Revolutionary War, he commanded Patriot forces to '
u'victory against the British and their allies. He presided over '
u'the Constitutional Convention of 1787 which established the new '
u'federal government, and he has been called the "Father of His '
u'Country".\nWashington was born to a moderately prosperous '
u'Virginian family of colonial planters and slaveholders. He '
u'had early educational opportunities, learned mathematics, '
u'and soon launched a successful career as a surveyor which '
u'enabled him to make significant land investments. He then '
u'joined the Virginia militia and fought in the French and Indian '
u'War. He was appointed commander-in-chief of the Continental Army '
u'during the Revolutionary War, leading an allied campaign to '
u'victory at the Siege of Yorktown which ended the war. His '
u'devotion to Republicanism and revulsion for tyrannical power '
u'impelled him to decline further authority after victory, and '
u'he resigned as commander-in-chief in 1783.\nAs one of the '
u'country\u2019s premier statesmen, Washington was unanimously '
u'elected President by the Electoral College in the first two '
u'national elections. He promoted and oversaw implementation of '
u'a strong, well-financed national government. He remained '
u'impartial in the fierce rivalry between cabinet secretaries '
u'Thomas Jefferson and Alexander Hamilton, although he adopted '
u'Hamilton\'s economic plans. When the French Revolution plunged '
u'Europe into war, Washington assumed a policy of neutrality to '
u'protect American ships\u2014although the controversial Jay '
u'Treaty of 1795 normalized trade relations with Great Britain. '
u'He set precedents still in use today, such as the Cabinet '
u'advisory system, the inaugural address, the title "Mr. '
u'President", and the concept of a two-term office limit. His '
u'Farewell Address strongly warned against political partisanship, '
u'sectionalism, and involvement in foreign wars.\nWashington '
u'inherited slaves at age 11 and officially supported other '
u'slaveholders as late as 1793. He eventually became troubled '
u'with slavery, however, and he freed all his slaves in his will '
u'in 1799. He is widely known for his religious toleration while '
u'his religious beliefs have been thoroughly debated by '
u'historians. Upon his death, Washington was famously eulogized as '
u'"first in war, first in peace, and first in the hearts of his '
u'countrymen". He has been widely memorialized by monuments, art, '
u'places, stamps, and currency, and has been consistently ranked '
u'by scholars among the top American presidents.'}
sitelink_cache = {
'en': {'George Washington': u'https://www.wikidata.org/wiki/Q23'}}
# mock_enrich = mock.Mock()
# mock_enrich.return_value = (el for el in [GW_snapshot_wikipedia_result])
def batch_enrich_mock(title, language):
    print(title)
    assert (language, title) == ('en', u'George Washington')
    return (GW_snapshot_wikipedia_result,)
@mock.patch(
target='eWRT.ws.wikidata.bundle_wikipedia_requests.wikipedia_page_info_from_titles',
new=batch_enrich_mock)
def test_batch_enrich_from_wikipedia():
"""
Using a mock for wikipedia_page_info_from_titles, this test runs fully
offline."""
enrichment_result = batch_enrich_from_wikipedia(
wikipedia_pages=sitelink_cache['en'],
entities_cache=GW_snapshot_wikidata_result,
language='en',
)
merge_result = next(enrichment_result)
assert_basic_structure_as_expected(merge_result)
assert merge_result['enwiki'] == GW_snapshot_wikipedia_result
def test_collect_multiple_from_wikipedia():
global sitelink_cache
enrichment_result = next(collect_multiple_from_wikipedia(
sitelinks_cache=sitelink_cache,
entities_cache=GW_snapshot_wikidata_result
))
    # A sitelinks cache with no entries for a language must yield nothing.
    modified_sitelink_cache = {'de': {}}
    with pytest.raises(StopIteration):
        next(collect_multiple_from_wikipedia(
            sitelinks_cache=modified_sitelink_cache,
            entities_cache=GW_snapshot_wikidata_result
        ))
def test_enrich_from_wikipedia_offline():
"""
No mock, real call to Wikipedia API, basic structure should still be
the same but literal equivalence between merge_result['enwiki'] and
cached snapshot is not expected
"""
with mock.patch(target='eWRT.ws.wikidata.bundle_wikipedia_requests.wikipedia_page_info_from_titles',
new=batch_enrich_mock):
enrichment_result = next(batch_enrich_from_wikipedia(
wikipedia_pages=sitelink_cache['en'],
entities_cache=GW_snapshot_wikidata_result,
language='en',
))
assert_basic_structure_as_expected(enrichment_result)
assert GW_snapshot_wikipedia_result['timestamp'] == enrichment_result['enwiki']['timestamp']
def assert_basic_structure_as_expected(merged_result):
"""
Check whether the basic structure and keys included are as expected.
:param merged_result:
:return:
"""
assert isinstance(merged_result, dict)
assert merged_result['language'] == 'en'
other_language_wikis = ('dewiki', 'frwiki', 'eswiki')
# assert all([key not in merged_result for key in other_language_wikis])
assert merged_result['labels'] == 'George Washington'
assert all([key in merged_result['enwiki'] for key in
GW_snapshot_wikipedia_result])
assert isinstance(merged_result['enwiki'], dict)
wiki_timestamp = merged_result['enwiki']['timestamp']
try:
datetime.datetime.strptime(wiki_timestamp, u'%Y-%m-%dT%H:%M:%SZ')
except ValueError:
raise ValueError('Timestamp doesn\'t appear to be a valid time. '
'Timestamp returned was: {}, expected format {}'.format(
wiki_timestamp,
datetime.datetime.now().strftime(u'%Y-%m-%dT%H:%M:%SZ')))
assert u'2018-10-04T05:06:49Z' <= merged_result['enwiki'][
'timestamp'] < datetime.datetime.now().strftime(u'%Y-%m-%dT%H:%M:%SZ')
# todo: add test for similarity of retrieved summary with snapshot?
batch_calls = 0
def mock_batch_enrich(*args, **kwargs):
    """Yield 20 empty entities per call, for no more than five calls
    (100 entities in total)."""
    global batch_calls
    batch_calls += 1
    if batch_calls > 5:
        # PEP 479: raising StopIteration inside a generator is an error on
        # Python 3.7+; a bare return ends the generator cleanly instead.
        return
    for i in range(20):
        yield {}
@mock.patch(
target='eWRT.ws.wikidata.bundle_wikipedia_requests.batch_enrich_from_wikipedia',
new=mock_batch_enrich)
@pytest.mark.skip
def test_wikipedia_request_dispatcher():
sitelink_cache = {'en': {str(i): i for i in range(100)}}
results = wikipedia_request_dispatcher(sitelinks_cache=sitelink_cache,
entity_cache=GW_snapshot_wikidata_result,
languages=['en'])
returned = [result for result in results]
assert returned
assert len(returned) >= 100
| gpl-3.0 | 5,938,466,527,326,198,000 | 56.98893 | 126 | 0.490105 | false |
Pajinek/spacewalk | backend/server/action/image.py | 14 | 1837 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2011 SUSE LLC
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
from spacewalk.common.rhnLog import log_debug
from spacewalk.server import rhnSQL
from spacewalk.server.rhnLib import InvalidAction
# the "exposed" functions
__rhnexport__ = ['deploy']
# returns the values for deploying a virtual machine with an image:
#
# downloadURL, proxySettings, memKB, vCPUs, domainName, virtBridge
#
def deploy(serverId, actionId, dry_run=0):
log_debug(3)
statement = """
select aid.mem_kb, aid.vcpus, aid.bridge_device,aid.download_url,
aid.proxy_server, aid.proxy_user, aid.proxy_pass
from rhnActionImageDeploy aid
where aid.action_id = :action_id"""
h = rhnSQL.prepare(statement)
h.execute(action_id=actionId)
row = h.fetchone_dict()
if not row:
# No image for this action
raise InvalidAction("image.deploy: No image found for action id "
"%s and server %s" % (actionId, serverId))
for key in ['download_url', 'proxy_server', 'proxy_user', 'proxy_pass', 'bridge_device']:
if row[key] is None:
row[key] = ""
params = {
"downloadURL": row['download_url'],
"proxySettings": {"proxyURL": row['proxy_server'], "proxyUser": row['proxy_user'], "proxyPass": row['proxy_pass']},
"memKB": row['mem_kb'],
"vCPUs": row['vcpus'],
"domainName": "",
"virtBridge": row['bridge_device']}
return (params)
| gpl-2.0 | 3,313,556,402,483,723,000 | 34.326923 | 123 | 0.645618 | false |
pleaseproject/python-for-android | python3-alpha/extra_modules/gdata/Crypto/Hash/HMAC.py | 45 | 3337 | """HMAC (Keyed-Hashing for Message Authentication) Python module.
Implements the HMAC algorithm as described by RFC 2104.
This is just a copy of the Python 2.2 HMAC module, modified to work when
used on versions of Python before 2.2.
"""
__revision__ = "$Id: HMAC.py,v 1.5 2002/07/25 17:19:02 z3p Exp $"
def _strxor(s1, s2):
"""Utility method. XOR the two strings s1 and s2 (must have same length).
"""
return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2))
# The size of the digests returned by HMAC depends on the underlying
# hashing module used.
digest_size = None
class HMAC:
"""RFC2104 HMAC class.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
def __init__(self, key, msg = None, digestmod = None):
"""Create a new HMAC object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. Defaults to the md5 module.
"""
        if digestmod is None:
from . import md5
digestmod = md5
self.digestmod = digestmod
self.outer = digestmod.new()
self.inner = digestmod.new()
try:
self.digest_size = digestmod.digest_size
except AttributeError:
self.digest_size = len(self.outer.digest())
blocksize = 64
ipad = "\x36" * blocksize
opad = "\x5C" * blocksize
if len(key) > blocksize:
key = digestmod.new(key).digest()
key = key + chr(0) * (blocksize - len(key))
self.outer.update(_strxor(key, opad))
self.inner.update(_strxor(key, ipad))
if (msg):
self.update(msg)
## def clear(self):
## raise NotImplementedError, "clear() method not available in HMAC."
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
other = HMAC("")
other.digestmod = self.digestmod
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
h = self.outer.copy()
h.update(self.inner.digest())
return h.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
return "".join([string.zfill(hex(ord(x))[2:], 2)
for x in tuple(self.digest())])
def new(key, msg = None, digestmod = None):
"""Create a new hashing object and return it.
key: The starting key for the hash.
msg: if available, will immediately be hashed into the object's starting
state.
You can now feed arbitrary strings into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
method.
"""
return HMAC(key, msg, digestmod)
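# A minimal usage sketch (an illustrative addition; the key and message are
# made-up values and the default md5 digestmod is assumed):
#
#     h = new("secret-key", "message to authenticate")
#     h.update(" ... more data ...")
#     tag = h.hexdigest()   # hex string of length 2 * digest_size
#
# copy() returns an independent clone, so an intermediate digest can be read
# without disturbing the original object's state.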
| apache-2.0 | 8,959,498,752,268,538,000 | 29.898148 | 78 | 0.606233 | false |
akash1808/nova | nova/tests/functional/v3/test_extended_server_attributes.py | 28 | 2687 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class ExtendedServerAttributesJsonTest(test_servers.ServersSampleBase):
extension_name = "os-extended-server-attributes"
extra_extensions_to_load = ["os-access-ips"]
_api_version = 'v2'
def _get_flags(self):
f = super(ExtendedServerAttributesJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.keypairs.Keypairs')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.extended_ips.Extended_ips')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.extended_ips_mac.'
'Extended_ips_mac')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.extended_server_attributes.'
'Extended_server_attributes')
return f
def test_show(self):
uuid = self._post_server()
response = self._do_get('servers/%s' % uuid)
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
        subs['instance_name'] = r'instance-\d{8}'
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['access_ip_v4'] = '1.2.3.4'
subs['access_ip_v6'] = '80fe::'
self._verify_response('server-get-resp', subs, response, 200)
def test_detail(self):
uuid = self._post_server()
response = self._do_get('servers/detail')
subs = self._get_regexes()
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
        subs['instance_name'] = r'instance-\d{8}'
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['access_ip_v4'] = '1.2.3.4'
subs['access_ip_v6'] = '80fe::'
self._verify_response('servers-detail-resp', subs, response, 200)
| apache-2.0 | 7,460,229,925,131,192,000 | 37.942029 | 78 | 0.630071 | false |
miyakz1192/neutron | neutron/db/migration/models/frozen.py | 10 | 72451 | # Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module should not be changed.
The module provides all database models that were present at the moment
heal_script was created.
Its purpose is to build metadata that can be compared with the current
database schema. Based on this comparison, the database can be healed with
a healing migration.
Current HEAD commit is 59da928e945ec58836d34fd561d30a8a446e2728
"""
import sqlalchemy as sa
from sqlalchemy.ext import declarative
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy import orm
from sqlalchemy import schema
from neutron.db import model_base
from neutron.openstack.common import uuidutils
# Dictionary of all tables that was renamed:
# {new_table_name: old_table_name}
renamed_tables = {
'subnetroutes': 'routes',
'cisco_credentials': 'credentials',
'cisco_nexusport_bindings': 'nexusport_bindings',
'cisco_qos_policies': 'qoss',
'tz_network_bindings': 'nvp_network_bindings',
'multi_provider_networks': 'nvp_multi_provider_networks',
'net_partitions': 'nuage_net_partitions',
'net_partition_router_mapping': 'nuage_net_partition_router_mapping',
'router_zone_mapping': 'nuage_router_zone_mapping',
'subnet_l2dom_mapping': 'nuage_subnet_l2dom_mapping',
'port_mapping': 'nuage_port_mapping',
'routerroutes_mapping': 'nuage_routerroutes_mapping',
}
#neutron/plugins/ml2/drivers/mech_arista/db.py
UUID_LEN = 36
STR_LEN = 255
#neutron/plugins/cisco/common/cisco_constants.py
CISCO_CONSTANTS_NETWORK_TYPE_VLAN = 'vlan'
CISCO_CONSTANTS_NETWORK_TYPE_OVERLAY = 'overlay'
CISCO_CONSTANTS_NETWORK_TYPE_TRUNK = 'trunk'
CISCO_CONSTANTS_NETWORK_TYPE_MULTI_SEGMENT = 'multi-segment'
CISCO_CONSTANTS_NETWORK = 'network'
CISCO_CONSTANTS_POLICY = 'policy'
CISCO_CONSTANTS_TENANT_ID_NOT_SET = 'TENANT_ID_NOT_SET'
#neutron/plugins/ml2/models.py
BINDING_PROFILE_LEN = 4095
#neutron/extensions/portbindings.py
VNIC_NORMAL = 'normal'
#neutron/common/constants.py
IPV6_SLAAC = 'slaac'
DHCPV6_STATEFUL = 'dhcpv6-stateful'
DHCPV6_STATELESS = 'dhcpv6-stateless'
BASEV2 = declarative.declarative_base(cls=model_base.NeutronBaseV2)
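# Illustrative sketch (an editorial addition, not part of the frozen
# snapshot): the metadata collected on BASEV2 can be diffed against a live
# schema with Alembic's autogenerate helpers, which is how a healing
# migration discovers drift. Assuming an SQLAlchemy `engine` bound to the
# target database:
#
#     from alembic.migration import MigrationContext
#     from alembic.autogenerate import compare_metadata
#
#     mc = MigrationContext.configure(engine.connect())
#     diff = compare_metadata(mc, BASEV2.metadata)  # list of schema diffs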
#neutron/db/models_v2.py
class HasTenant(object):
tenant_id = sa.Column(sa.String(255))
#neutron/db/models_v2.py
class HasId(object):
id = sa.Column(sa.String(36),
primary_key=True,
default=uuidutils.generate_uuid)
#neutron/db/models_v2.py
class HasStatusDescription(object):
status = sa.Column(sa.String(16), nullable=False)
status_description = sa.Column(sa.String(255))
#neutron/db/models_v2.py
class IPAvailabilityRange(BASEV2):
allocation_pool_id = sa.Column(sa.String(36),
sa.ForeignKey('ipallocationpools.id',
ondelete="CASCADE"),
nullable=False,
primary_key=True)
first_ip = sa.Column(sa.String(64), nullable=False, primary_key=True)
last_ip = sa.Column(sa.String(64), nullable=False, primary_key=True)
#neutron/db/models_v2.py
class IPAllocationPool(BASEV2, HasId):
subnet_id = sa.Column(sa.String(36), sa.ForeignKey('subnets.id',
ondelete="CASCADE"),
nullable=True)
first_ip = sa.Column(sa.String(64), nullable=False)
last_ip = sa.Column(sa.String(64), nullable=False)
available_ranges = orm.relationship(IPAvailabilityRange,
backref='ipallocationpool',
lazy="joined",
cascade='all, delete-orphan')
#neutron/db/models_v2.py
class IPAllocation(BASEV2):
port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id',
ondelete="CASCADE"),
nullable=True)
ip_address = sa.Column(sa.String(64), nullable=False, primary_key=True)
subnet_id = sa.Column(sa.String(36), sa.ForeignKey('subnets.id',
ondelete="CASCADE"),
nullable=False, primary_key=True)
network_id = sa.Column(sa.String(36), sa.ForeignKey("networks.id",
ondelete="CASCADE"),
nullable=False, primary_key=True)
#neutron/db/models_v2.py
class Route(object):
destination = sa.Column(sa.String(64), nullable=False, primary_key=True)
nexthop = sa.Column(sa.String(64), nullable=False, primary_key=True)
#neutron/db/models_v2.py
class SubnetRoute(BASEV2, Route):
subnet_id = sa.Column(sa.String(36),
sa.ForeignKey('subnets.id',
ondelete="CASCADE"),
primary_key=True)
#neutron/db/models_v2.py
class Port(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
network_id = sa.Column(sa.String(36), sa.ForeignKey("networks.id"),
nullable=False)
fixed_ips = orm.relationship(IPAllocation, backref='ports', lazy='joined')
mac_address = sa.Column(sa.String(32), nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
status = sa.Column(sa.String(16), nullable=False)
device_id = sa.Column(sa.String(255), nullable=False)
device_owner = sa.Column(sa.String(255), nullable=False)
#neutron/db/models_v2.py
class DNSNameServer(BASEV2):
address = sa.Column(sa.String(128), nullable=False, primary_key=True)
subnet_id = sa.Column(sa.String(36),
sa.ForeignKey('subnets.id',
ondelete="CASCADE"),
primary_key=True)
#neutron/db/models_v2.py
class Subnet(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id'))
ip_version = sa.Column(sa.Integer, nullable=False)
cidr = sa.Column(sa.String(64), nullable=False)
gateway_ip = sa.Column(sa.String(64))
allocation_pools = orm.relationship(IPAllocationPool,
backref='subnet',
lazy="joined",
cascade='delete')
enable_dhcp = sa.Column(sa.Boolean())
dns_nameservers = orm.relationship(DNSNameServer,
backref='subnet',
cascade='all, delete, delete-orphan')
routes = orm.relationship(SubnetRoute,
backref='subnet',
cascade='all, delete, delete-orphan')
shared = sa.Column(sa.Boolean)
ipv6_ra_mode = sa.Column(sa.Enum(IPV6_SLAAC,
DHCPV6_STATEFUL,
DHCPV6_STATELESS,
name='ipv6_ra_modes'), nullable=True)
ipv6_address_mode = sa.Column(sa.Enum(IPV6_SLAAC,
DHCPV6_STATEFUL,
DHCPV6_STATELESS,
name='ipv6_address_modes'),
nullable=True)
#neutron/db/models_v2.py
class Network(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
ports = orm.relationship(Port, backref='networks')
subnets = orm.relationship(Subnet, backref='networks',
lazy="joined")
status = sa.Column(sa.String(16))
admin_state_up = sa.Column(sa.Boolean)
shared = sa.Column(sa.Boolean)
#neutron/db/agents_db.py
class Agent(BASEV2, HasId):
__table_args__ = (
sa.UniqueConstraint('agent_type', 'host',
name='uniq_agents0agent_type0host'),
)
agent_type = sa.Column(sa.String(255), nullable=False)
binary = sa.Column(sa.String(255), nullable=False)
topic = sa.Column(sa.String(255), nullable=False)
host = sa.Column(sa.String(255), nullable=False)
admin_state_up = sa.Column(sa.Boolean, default=True,
server_default=sa.sql.true(), nullable=False)
created_at = sa.Column(sa.DateTime, nullable=False)
started_at = sa.Column(sa.DateTime, nullable=False)
heartbeat_timestamp = sa.Column(sa.DateTime, nullable=False)
description = sa.Column(sa.String(255))
configurations = sa.Column(sa.String(4095), nullable=False)
#neutron/db/agentschedulers_db.py
class NetworkDhcpAgentBinding(BASEV2):
network_id = sa.Column(sa.String(36),
sa.ForeignKey("networks.id", ondelete='CASCADE'),
primary_key=True)
dhcp_agent = orm.relation(Agent)
dhcp_agent_id = sa.Column(sa.String(36),
sa.ForeignKey("agents.id",
ondelete='CASCADE'),
primary_key=True)
#neutron/db/allowedaddresspairs_db.py
class AllowedAddressPair(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
mac_address = sa.Column(sa.String(32), nullable=False, primary_key=True)
ip_address = sa.Column(sa.String(64), nullable=False, primary_key=True)
port = orm.relationship(
Port,
backref=orm.backref("allowed_address_pairs",
lazy="joined", cascade="delete"))
#neutron/db/external_net_db.py
class ExternalNetwork(BASEV2):
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
network = orm.relationship(
Network,
backref=orm.backref("external", lazy='joined',
uselist=False, cascade='delete'))
#neutron/db/extradhcpopt_db.py
class ExtraDhcpOpt(BASEV2, HasId):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
nullable=False)
opt_name = sa.Column(sa.String(64), nullable=False)
opt_value = sa.Column(sa.String(255), nullable=False)
__table_args__ = (sa.UniqueConstraint('port_id',
'opt_name',
name='uidx_portid_optname'),
BASEV2.__table_args__,)
ports = orm.relationship(
Port,
backref=orm.backref("dhcp_opts", lazy='joined', cascade='delete'))
#neutron/db/l3_db.py
class Router(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
status = sa.Column(sa.String(16))
admin_state_up = sa.Column(sa.Boolean)
gw_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
gw_port = orm.relationship(Port, lazy='joined')
enable_snat = sa.Column(sa.Boolean, default=True,
server_default=sa.sql.true(), nullable=False)
#neutron/db/l3_db.py
class FloatingIP(BASEV2, HasId, HasTenant):
floating_ip_address = sa.Column(sa.String(64), nullable=False)
floating_network_id = sa.Column(sa.String(36), nullable=False)
floating_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'),
nullable=False)
fixed_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
fixed_ip_address = sa.Column(sa.String(64))
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id'))
last_known_router_id = sa.Column(sa.String(36))
status = sa.Column(sa.String(16))
#neutron/db/extraroute_db.py
class RouterRoute(BASEV2, Route):
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id',
ondelete="CASCADE"),
primary_key=True)
router = orm.relationship(Router,
backref=orm.backref("route_list",
lazy='joined',
cascade='delete'))
#neutron/db/servicetype_db.py
class ProviderResourceAssociation(BASEV2):
provider_name = sa.Column(sa.String(255),
nullable=False, primary_key=True)
resource_id = sa.Column(sa.String(36), nullable=False, primary_key=True,
unique=True)
#neutron/db/firewall/firewall_db.py
class FirewallRule(BASEV2, HasId, HasTenant):
__tablename__ = 'firewall_rules'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(1024))
firewall_policy_id = sa.Column(sa.String(36),
sa.ForeignKey('firewall_policies.id'),
nullable=True)
shared = sa.Column(sa.Boolean)
protocol = sa.Column(sa.String(40))
ip_version = sa.Column(sa.Integer, nullable=False)
source_ip_address = sa.Column(sa.String(46))
destination_ip_address = sa.Column(sa.String(46))
source_port_range_min = sa.Column(sa.Integer)
source_port_range_max = sa.Column(sa.Integer)
destination_port_range_min = sa.Column(sa.Integer)
destination_port_range_max = sa.Column(sa.Integer)
action = sa.Column(sa.Enum('allow', 'deny', name='firewallrules_action'))
enabled = sa.Column(sa.Boolean)
position = sa.Column(sa.Integer)
#neutron/db/firewall/firewall_db.py
class Firewall(BASEV2, HasId, HasTenant):
__tablename__ = 'firewalls'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(1024))
shared = sa.Column(sa.Boolean)
admin_state_up = sa.Column(sa.Boolean)
status = sa.Column(sa.String(16))
firewall_policy_id = sa.Column(sa.String(36),
sa.ForeignKey('firewall_policies.id'),
nullable=True)
#neutron/db/firewall/firewall_db.py
class FirewallPolicy(BASEV2, HasId, HasTenant):
__tablename__ = 'firewall_policies'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(1024))
shared = sa.Column(sa.Boolean)
firewall_rules = orm.relationship(
FirewallRule,
backref=orm.backref('firewall_policies', cascade='all, delete'),
order_by='FirewallRule.position',
collection_class=ordering_list('position', count_from=1))
audited = sa.Column(sa.Boolean)
firewalls = orm.relationship(Firewall, backref='firewall_policies')
#neutron/db/l3_agentschedulers_db.py
class RouterL3AgentBinding(BASEV2, HasId):
router_id = sa.Column(sa.String(36),
sa.ForeignKey("routers.id", ondelete='CASCADE'))
l3_agent = orm.relation(Agent)
l3_agent_id = sa.Column(sa.String(36),
sa.ForeignKey("agents.id",
ondelete='CASCADE'))
#neutron/db/loadbalancer/loadbalancer_db.py
class SessionPersistence(BASEV2):
vip_id = sa.Column(sa.String(36),
sa.ForeignKey("vips.id"),
primary_key=True)
type = sa.Column(sa.Enum("SOURCE_IP",
"HTTP_COOKIE",
"APP_COOKIE",
name="sesssionpersistences_type"),
nullable=False)
cookie_name = sa.Column(sa.String(1024))
#neutron/db/loadbalancer/loadbalancer_db.py
class PoolStatistics(BASEV2):
pool_id = sa.Column(sa.String(36), sa.ForeignKey("pools.id"),
primary_key=True)
bytes_in = sa.Column(sa.BigInteger, nullable=False)
bytes_out = sa.Column(sa.BigInteger, nullable=False)
active_connections = sa.Column(sa.BigInteger, nullable=False)
total_connections = sa.Column(sa.BigInteger, nullable=False)
#neutron/db/loadbalancer/loadbalancer_db.py
class Vip(BASEV2, HasId, HasTenant, HasStatusDescription):
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
protocol_port = sa.Column(sa.Integer, nullable=False)
protocol = sa.Column(sa.Enum("HTTP", "HTTPS", "TCP", name="lb_protocols"),
nullable=False)
pool_id = sa.Column(sa.String(36), nullable=False, unique=True)
session_persistence = orm.relationship(SessionPersistence,
uselist=False,
backref="vips",
cascade="all, delete-orphan")
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
connection_limit = sa.Column(sa.Integer)
port = orm.relationship(Port)
#neutron/db/loadbalancer/loadbalancer_db.py
class Member(BASEV2, HasId, HasTenant, HasStatusDescription):
__table_args__ = (
sa.schema.UniqueConstraint('pool_id', 'address', 'protocol_port',
name='uniq_member0pool_id0address0port'),
)
pool_id = sa.Column(sa.String(36), sa.ForeignKey("pools.id"),
nullable=False)
address = sa.Column(sa.String(64), nullable=False)
protocol_port = sa.Column(sa.Integer, nullable=False)
weight = sa.Column(sa.Integer, nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
#neutron/db/loadbalancer/loadbalancer_db.py
class Pool(BASEV2, HasId, HasTenant, HasStatusDescription):
vip_id = sa.Column(sa.String(36), sa.ForeignKey("vips.id"))
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
subnet_id = sa.Column(sa.String(36), nullable=False)
protocol = sa.Column(sa.Enum("HTTP", "HTTPS", "TCP", name="lb_protocols"),
nullable=False)
lb_method = sa.Column(sa.Enum("ROUND_ROBIN",
"LEAST_CONNECTIONS",
"SOURCE_IP",
name="pools_lb_method"),
nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
stats = orm.relationship(PoolStatistics,
uselist=False,
backref="pools",
cascade="all, delete-orphan")
members = orm.relationship(Member, backref="pools",
cascade="all, delete-orphan")
monitors = orm.relationship("PoolMonitorAssociation", backref="pools",
cascade="all, delete-orphan")
vip = orm.relationship(Vip, backref='pool')
provider = orm.relationship(
ProviderResourceAssociation,
uselist=False,
lazy="joined",
primaryjoin="Pool.id==ProviderResourceAssociation.resource_id",
foreign_keys=[ProviderResourceAssociation.resource_id]
)
#neutron/db/loadbalancer/loadbalancer_db.py
class HealthMonitor(BASEV2, HasId, HasTenant):
type = sa.Column(sa.Enum("PING", "TCP", "HTTP", "HTTPS",
name="healthmontiors_type"),
nullable=False)
delay = sa.Column(sa.Integer, nullable=False)
timeout = sa.Column(sa.Integer, nullable=False)
max_retries = sa.Column(sa.Integer, nullable=False)
http_method = sa.Column(sa.String(16))
url_path = sa.Column(sa.String(255))
expected_codes = sa.Column(sa.String(64))
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
pools = orm.relationship(
"PoolMonitorAssociation", backref="healthmonitor",
cascade="all", lazy="joined"
)
#neutron/db/loadbalancer/loadbalancer_db.py
class PoolMonitorAssociation(BASEV2, HasStatusDescription):
pool_id = sa.Column(sa.String(36),
sa.ForeignKey("pools.id"),
primary_key=True)
monitor_id = sa.Column(sa.String(36),
sa.ForeignKey("healthmonitors.id"),
primary_key=True)
#neutron/db/metering/metering_db.py
class MeteringLabelRule(BASEV2, HasId):
direction = sa.Column(sa.Enum('ingress', 'egress',
name='meteringlabels_direction'))
remote_ip_prefix = sa.Column(sa.String(64))
metering_label_id = sa.Column(sa.String(36),
sa.ForeignKey("meteringlabels.id",
ondelete="CASCADE"),
nullable=False)
excluded = sa.Column(sa.Boolean, default=False,
server_default=sa.sql.false())
#neutron/db/metering/metering_db.py
class MeteringLabel(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(1024))
rules = orm.relationship(MeteringLabelRule, backref="label",
cascade="delete", lazy="joined")
routers = orm.relationship(
Router,
primaryjoin="MeteringLabel.tenant_id==Router.tenant_id",
foreign_keys='MeteringLabel.tenant_id',
uselist=True)
#neutron/db/portbindings_db.py
class PortBindingPort(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
host = sa.Column(sa.String(255), nullable=False)
port = orm.relationship(
Port,
backref=orm.backref("portbinding",
lazy='joined', uselist=False,
cascade='delete'))
#neutron/db/portsecurity_db.py
class PortSecurityBinding(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
port_security_enabled = sa.Column(sa.Boolean(), nullable=False)
port = orm.relationship(
Port,
backref=orm.backref("port_security", uselist=False,
cascade='delete', lazy='joined'))
#neutron/db/portsecurity_db.py
class NetworkSecurityBinding(BASEV2):
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
port_security_enabled = sa.Column(sa.Boolean(), nullable=False)
network = orm.relationship(
Network,
backref=orm.backref("port_security", uselist=False,
cascade='delete', lazy='joined'))
#neutron/db/quota_db.py
class Quota(BASEV2, HasId):
tenant_id = sa.Column(sa.String(255), index=True)
resource = sa.Column(sa.String(255))
limit = sa.Column(sa.Integer)
#neutron/db/routedserviceinsertion_db.py
class ServiceRouterBinding(BASEV2):
resource_id = sa.Column(sa.String(36),
primary_key=True)
resource_type = sa.Column(sa.String(36),
primary_key=True)
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id'),
nullable=False)
#neutron/db/routerservicetype_db.py
class RouterServiceTypeBinding(BASEV2):
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
service_type_id = sa.Column(sa.String(36),
nullable=False)
#neutron/db/securitygroups_db.py
class SecurityGroup(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
#neutron/db/securitygroups_db.py
class SecurityGroupPortBinding(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey("ports.id",
ondelete='CASCADE'),
primary_key=True)
security_group_id = sa.Column(sa.String(36),
sa.ForeignKey("securitygroups.id"),
primary_key=True)
# Add a relationship to the Port model in order to instruct SQLAlchemy to
# eagerly load security group bindings
ports = orm.relationship(
Port,
backref=orm.backref("security_groups",
lazy='joined', cascade='delete'))
#neutron/db/securitygroups_db.py
class SecurityGroupRule(BASEV2, HasId,
HasTenant):
security_group_id = sa.Column(sa.String(36),
sa.ForeignKey("securitygroups.id",
ondelete="CASCADE"),
nullable=False)
remote_group_id = sa.Column(sa.String(36),
sa.ForeignKey("securitygroups.id",
ondelete="CASCADE"),
nullable=True)
direction = sa.Column(sa.Enum('ingress', 'egress',
name='securitygrouprules_direction'))
ethertype = sa.Column(sa.String(40))
protocol = sa.Column(sa.String(40))
port_range_min = sa.Column(sa.Integer)
port_range_max = sa.Column(sa.Integer)
remote_ip_prefix = sa.Column(sa.String(255))
security_group = orm.relationship(
SecurityGroup,
backref=orm.backref('rules', cascade='all,delete'),
primaryjoin="SecurityGroup.id==SecurityGroupRule.security_group_id")
source_group = orm.relationship(
SecurityGroup,
backref=orm.backref('source_rules', cascade='all,delete'),
primaryjoin="SecurityGroup.id==SecurityGroupRule.remote_group_id")
#neutron/db/vpn/vpn_db.py
class IPsecPeerCidr(BASEV2):
cidr = sa.Column(sa.String(32), nullable=False, primary_key=True)
ipsec_site_connection_id = sa.Column(
sa.String(36),
sa.ForeignKey('ipsec_site_connections.id',
ondelete="CASCADE"),
primary_key=True)
#neutron/db/vpn/vpn_db.py
class IPsecPolicy(BASEV2, HasId, HasTenant):
__tablename__ = 'ipsecpolicies'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
transform_protocol = sa.Column(sa.Enum("esp", "ah", "ah-esp",
name="ipsec_transform_protocols"),
nullable=False)
auth_algorithm = sa.Column(sa.Enum("sha1",
name="vpn_auth_algorithms"),
nullable=False)
encryption_algorithm = sa.Column(sa.Enum("3des", "aes-128",
"aes-256", "aes-192",
name="vpn_encrypt_algorithms"),
nullable=False)
encapsulation_mode = sa.Column(sa.Enum("tunnel", "transport",
name="ipsec_encapsulations"),
nullable=False)
lifetime_units = sa.Column(sa.Enum("seconds", "kilobytes",
name="vpn_lifetime_units"),
nullable=False)
lifetime_value = sa.Column(sa.Integer, nullable=False)
pfs = sa.Column(sa.Enum("group2", "group5", "group14",
name="vpn_pfs"), nullable=False)
#neutron/db/vpn/vpn_db.py
class IKEPolicy(BASEV2, HasId, HasTenant):
__tablename__ = 'ikepolicies'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
auth_algorithm = sa.Column(sa.Enum("sha1",
name="vpn_auth_algorithms"),
nullable=False)
encryption_algorithm = sa.Column(sa.Enum("3des", "aes-128",
"aes-256", "aes-192",
name="vpn_encrypt_algorithms"),
nullable=False)
phase1_negotiation_mode = sa.Column(sa.Enum("main",
name="ike_phase1_mode"),
nullable=False)
lifetime_units = sa.Column(sa.Enum("seconds", "kilobytes",
name="vpn_lifetime_units"),
nullable=False)
lifetime_value = sa.Column(sa.Integer, nullable=False)
ike_version = sa.Column(sa.Enum("v1", "v2", name="ike_versions"),
nullable=False)
pfs = sa.Column(sa.Enum("group2", "group5", "group14",
name="vpn_pfs"), nullable=False)
#neutron/db/vpn/vpn_db.py
class IPsecSiteConnection(BASEV2,
HasId, HasTenant):
__tablename__ = 'ipsec_site_connections'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
peer_address = sa.Column(sa.String(255), nullable=False)
peer_id = sa.Column(sa.String(255), nullable=False)
route_mode = sa.Column(sa.String(8), nullable=False)
mtu = sa.Column(sa.Integer, nullable=False)
initiator = sa.Column(sa.Enum("bi-directional", "response-only",
name="vpn_initiators"), nullable=False)
auth_mode = sa.Column(sa.String(16), nullable=False)
psk = sa.Column(sa.String(255), nullable=False)
dpd_action = sa.Column(sa.Enum("hold", "clear",
"restart", "disabled",
"restart-by-peer", name="vpn_dpd_actions"),
nullable=False)
dpd_interval = sa.Column(sa.Integer, nullable=False)
dpd_timeout = sa.Column(sa.Integer, nullable=False)
status = sa.Column(sa.String(16), nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
vpnservice_id = sa.Column(sa.String(36),
sa.ForeignKey('vpnservices.id'),
nullable=False)
ipsecpolicy_id = sa.Column(sa.String(36),
sa.ForeignKey('ipsecpolicies.id'),
nullable=False)
ikepolicy_id = sa.Column(sa.String(36),
sa.ForeignKey('ikepolicies.id'),
nullable=False)
ipsecpolicy = orm.relationship(
IPsecPolicy, backref='ipsec_site_connection')
ikepolicy = orm.relationship(IKEPolicy, backref='ipsec_site_connection')
peer_cidrs = orm.relationship(IPsecPeerCidr,
backref='ipsec_site_connection',
lazy='joined',
cascade='all, delete, delete-orphan')
#neutron/db/vpn/vpn_db.py
class VPNService(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
status = sa.Column(sa.String(16), nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
subnet_id = sa.Column(sa.String(36), sa.ForeignKey('subnets.id'),
nullable=False)
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id'),
nullable=False)
subnet = orm.relationship(Subnet)
router = orm.relationship(Router)
ipsec_site_connections = orm.relationship(
IPsecSiteConnection,
backref='vpnservice',
cascade="all, delete-orphan")
#neutron/plugins/bigswitch/db/consistency_db.py
class ConsistencyHash(BASEV2):
__tablename__ = 'consistencyhashes'
hash_id = sa.Column(sa.String(255),
primary_key=True)
hash = sa.Column(sa.String(255), nullable=False)
#neutron/plugins/bigswitch/routerrule_db.py
class RouterRule(BASEV2):
id = sa.Column(sa.Integer, primary_key=True)
source = sa.Column(sa.String(64), nullable=False)
destination = sa.Column(sa.String(64), nullable=False)
nexthops = orm.relationship('NextHop', cascade='all,delete')
action = sa.Column(sa.String(10), nullable=False)
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id',
ondelete="CASCADE"))
#neutron/plugins/bigswitch/routerrule_db.py
class NextHop(BASEV2):
rule_id = sa.Column(sa.Integer,
sa.ForeignKey('routerrules.id',
ondelete="CASCADE"),
primary_key=True)
nexthop = sa.Column(sa.String(64), nullable=False, primary_key=True)
#neutron/plugins/brocade/db/models.py
class BrocadeNetwork(BASEV2, HasId):
vlan = sa.Column(sa.String(10))
#neutron/plugins/brocade/db/models.py
class BrocadePort(BASEV2):
port_id = sa.Column(sa.String(36), primary_key=True, default="",
server_default='')
network_id = sa.Column(sa.String(36),
sa.ForeignKey("brocadenetworks.id"),
nullable=False)
admin_state_up = sa.Column(sa.Boolean, nullable=False)
physical_interface = sa.Column(sa.String(36))
vlan_id = sa.Column(sa.String(36))
tenant_id = sa.Column(sa.String(36))
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvVlanAllocation(BASEV2):
__tablename__ = 'cisco_n1kv_vlan_allocations'
physical_network = sa.Column(sa.String(64),
nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sa.sql.false())
network_profile_id = sa.Column(sa.String(36),
sa.ForeignKey('cisco_network_profiles.id',
ondelete="CASCADE"),
nullable=False)
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvVxlanAllocation(BASEV2):
__tablename__ = 'cisco_n1kv_vxlan_allocations'
vxlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sa.sql.false())
network_profile_id = sa.Column(sa.String(36),
sa.ForeignKey('cisco_network_profiles.id',
ondelete="CASCADE"),
nullable=False)
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvPortBinding(BASEV2):
__tablename__ = 'cisco_n1kv_port_bindings'
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
profile_id = sa.Column(sa.String(36),
sa.ForeignKey('cisco_policy_profiles.id'))
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvNetworkBinding(BASEV2):
__tablename__ = 'cisco_n1kv_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
network_type = sa.Column(sa.String(32), nullable=False)
physical_network = sa.Column(sa.String(64))
segmentation_id = sa.Column(sa.Integer)
multicast_ip = sa.Column(sa.String(32))
profile_id = sa.Column(sa.String(36),
sa.ForeignKey('cisco_network_profiles.id'))
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kVmNetwork(BASEV2):
__tablename__ = 'cisco_n1kv_vmnetworks'
name = sa.Column(sa.String(80), primary_key=True)
profile_id = sa.Column(sa.String(36),
sa.ForeignKey('cisco_policy_profiles.id'))
network_id = sa.Column(sa.String(36))
port_count = sa.Column(sa.Integer)
#neutron/plugins/cisco/db/n1kv_models_v2.py
class NetworkProfile(BASEV2, HasId):
__tablename__ = 'cisco_network_profiles'
name = sa.Column(sa.String(255))
segment_type = sa.Column(
sa.Enum(CISCO_CONSTANTS_NETWORK_TYPE_VLAN,
CISCO_CONSTANTS_NETWORK_TYPE_OVERLAY,
CISCO_CONSTANTS_NETWORK_TYPE_TRUNK,
CISCO_CONSTANTS_NETWORK_TYPE_MULTI_SEGMENT,
name='segment_type'),
nullable=False)
sub_type = sa.Column(sa.String(255))
segment_range = sa.Column(sa.String(255))
multicast_ip_index = sa.Column(sa.Integer, default=0,
server_default='0')
multicast_ip_range = sa.Column(sa.String(255))
physical_network = sa.Column(sa.String(255))
#neutron/plugins/cisco/db/n1kv_models_v2.py
class PolicyProfile(BASEV2):
__tablename__ = 'cisco_policy_profiles'
id = sa.Column(sa.String(36), primary_key=True)
name = sa.Column(sa.String(255))
#neutron/plugins/cisco/db/n1kv_models_v2.py
class ProfileBinding(BASEV2):
__tablename__ = 'cisco_n1kv_profile_bindings'
profile_type = sa.Column(sa.Enum(CISCO_CONSTANTS_NETWORK,
CISCO_CONSTANTS_POLICY,
name='profile_type'))
tenant_id = sa.Column(sa.String(36),
primary_key=True,
default=CISCO_CONSTANTS_TENANT_ID_NOT_SET,
server_default=CISCO_CONSTANTS_TENANT_ID_NOT_SET)
profile_id = sa.Column(sa.String(36), primary_key=True)
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvTrunkSegmentBinding(BASEV2):
__tablename__ = 'cisco_n1kv_trunk_segments'
trunk_segment_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id',
ondelete="CASCADE"),
primary_key=True)
segment_id = sa.Column(sa.String(36), nullable=False, primary_key=True)
dot1qtag = sa.Column(sa.String(36), nullable=False, primary_key=True)
#neutron/plugins/cisco/db/n1kv_models_v2.py
class N1kvMultiSegmentNetworkBinding(BASEV2):
__tablename__ = 'cisco_n1kv_multi_segments'
multi_segment_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id',
ondelete="CASCADE"),
primary_key=True)
segment1_id = sa.Column(sa.String(36), nullable=False, primary_key=True)
segment2_id = sa.Column(sa.String(36), nullable=False, primary_key=True)
encap_profile_name = sa.Column(sa.String(36))
#neutron/plugins/cisco/db/network_models_v2.py
class QoS(BASEV2):
__tablename__ = 'cisco_qos_policies'
qos_id = sa.Column(sa.String(255))
tenant_id = sa.Column(sa.String(255), primary_key=True)
qos_name = sa.Column(sa.String(255), primary_key=True)
qos_desc = sa.Column(sa.String(255))
#neutron/plugins/cisco/db/network_models_v2.py
class Credential(BASEV2):
__tablename__ = 'cisco_credentials'
credential_id = sa.Column(sa.String(255))
credential_name = sa.Column(sa.String(255), primary_key=True)
user_name = sa.Column(sa.String(255))
password = sa.Column(sa.String(255))
type = sa.Column(sa.String(255))
#neutron/plugins/cisco/db/network_models_v2.py
class ProviderNetwork(BASEV2):
__tablename__ = 'cisco_provider_networks'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
network_type = sa.Column(sa.String(255), nullable=False)
segmentation_id = sa.Column(sa.Integer, nullable=False)
#neutron/plugins/cisco/db/nexus_models_v2.py
#class was renamed from NexusPortBinding to CiscoNexusPortBinding
class CiscoNexusPortBinding(BASEV2):
__tablename__ = "cisco_nexusport_bindings"
id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
port_id = sa.Column(sa.String(255))
vlan_id = sa.Column(sa.Integer, nullable=False)
switch_ip = sa.Column(sa.String(255), nullable=False)
instance_id = sa.Column(sa.String(255), nullable=False)
#neutron/plugins/hyperv/model.py
#class was renamed from VlanAllocation to HyperVVlanAllocation
class HyperVVlanAllocation(BASEV2):
__tablename__ = 'hyperv_vlan_allocations'
physical_network = sa.Column(sa.String(64),
nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
#neutron/plugins/hyperv/model.py
#class was renamed from NetworkBinding to HyperVNetworkBinding
class HyperVNetworkBinding(BASEV2):
__tablename__ = 'hyperv_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
network_type = sa.Column(sa.String(32), nullable=False)
physical_network = sa.Column(sa.String(64))
segmentation_id = sa.Column(sa.Integer)
#neutron/plugins/linuxbridge/db/l2network_models_v2.py
class NetworkState(BASEV2):
__tablename__ = 'network_states'
physical_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
#neutron/plugins/linuxbridge/db/l2network_models_v2.py
class NetworkBinding(BASEV2):
__tablename__ = 'network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
physical_network = sa.Column(sa.String(64))
vlan_id = sa.Column(sa.Integer, nullable=False)
#neutron/plugins/metaplugin/meta_models_v2.py
class NetworkFlavor(BASEV2):
flavor = sa.Column(sa.String(255))
network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id',
ondelete="CASCADE"),
primary_key=True)
#neutron/plugins/metaplugin/meta_models_v2.py
class RouterFlavor(BASEV2):
flavor = sa.Column(sa.String(255))
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id',
ondelete="CASCADE"),
primary_key=True)
#neutron/plugins/ml2/drivers/brocade/db/models.py
class ML2_BrocadeNetwork(BASEV2, HasId,
HasTenant):
vlan = sa.Column(sa.String(10))
segment_id = sa.Column(sa.String(36))
network_type = sa.Column(sa.String(10))
#neutron/plugins/ml2/drivers/brocade/db/models.py
class ML2_BrocadePort(BASEV2, HasId,
HasTenant):
network_id = sa.Column(sa.String(36),
sa.ForeignKey("ml2_brocadenetworks.id"),
nullable=False)
admin_state_up = sa.Column(sa.Boolean, nullable=False)
physical_interface = sa.Column(sa.String(36))
vlan_id = sa.Column(sa.String(36))
#neutron/plugins/ml2/drivers/cisco/apic/apic_model.py
class NetworkEPG(BASEV2):
__tablename__ = 'cisco_ml2_apic_epgs'
network_id = sa.Column(sa.String(255), nullable=False, primary_key=True)
epg_id = sa.Column(sa.String(64), nullable=False)
segmentation_id = sa.Column(sa.String(64), nullable=False)
provider = sa.Column(sa.Boolean, default=False,
server_default=sa.sql.false(), nullable=False)
#neutron/plugins/ml2/drivers/cisco/apic/apic_model.py
class PortProfile(BASEV2):
__tablename__ = 'cisco_ml2_apic_port_profiles'
node_id = sa.Column(sa.String(255), nullable=False, primary_key=True)
profile_id = sa.Column(sa.String(64), nullable=False)
hpselc_id = sa.Column(sa.String(64), nullable=False)
module = sa.Column(sa.String(10), nullable=False)
from_port = sa.Column(sa.Integer(), nullable=False)
to_port = sa.Column(sa.Integer(), nullable=False)
#neutron/plugins/ml2/drivers/cisco/apic/apic_model.py
class TenantContract(BASEV2, HasTenant):
__tablename__ = 'cisco_ml2_apic_contracts'
__table_args__ = (sa.PrimaryKeyConstraint('tenant_id'),)
contract_id = sa.Column(sa.String(64), nullable=False)
filter_id = sa.Column(sa.String(64), nullable=False)
#neutron/plugins/ml2/drivers/cisco/nexus/nexus_models_v2.py
#class was renamed from NexusPortBinding to CiscoMl2NexusPortBinding
class CiscoMl2NexusPortBinding(BASEV2):
__tablename__ = "cisco_ml2_nexusport_bindings"
binding_id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
port_id = sa.Column(sa.String(255))
vlan_id = sa.Column(sa.Integer, nullable=False)
switch_ip = sa.Column(sa.String(255))
instance_id = sa.Column(sa.String(255))
#neutron/plugins/ml2/drivers/mech_arista/db.py
class AristaProvisionedNets(BASEV2, HasId,
HasTenant):
__tablename__ = 'arista_provisioned_nets'
network_id = sa.Column(sa.String(UUID_LEN))
segmentation_id = sa.Column(sa.Integer)
#neutron/plugins/ml2/drivers/mech_arista/db.py
class AristaProvisionedVms(BASEV2, HasId,
HasTenant):
__tablename__ = 'arista_provisioned_vms'
vm_id = sa.Column(sa.String(STR_LEN))
host_id = sa.Column(sa.String(STR_LEN))
port_id = sa.Column(sa.String(UUID_LEN))
network_id = sa.Column(sa.String(UUID_LEN))
#neutron/plugins/ml2/drivers/mech_arista/db.py
class AristaProvisionedTenants(BASEV2, HasId,
HasTenant):
__tablename__ = 'arista_provisioned_tenants'
#neutron/plugins/ml2/drivers/type_flat.py
class FlatAllocation(BASEV2):
__tablename__ = 'ml2_flat_allocations'
physical_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
#neutron/plugins/ml2/drivers/type_gre.py
class GreAllocation(BASEV2):
__tablename__ = 'ml2_gre_allocations'
gre_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sa.sql.false())
#neutron/plugins/ml2/drivers/type_gre.py
class GreEndpoints(BASEV2):
__tablename__ = 'ml2_gre_endpoints'
ip_address = sa.Column(sa.String(64), primary_key=True)
#neutron/plugins/ml2/drivers/type_vlan.py
#class was renamed from VlanAllocation to Ml2VlanAllocation
class Ml2VlanAllocation(BASEV2):
__tablename__ = 'ml2_vlan_allocations'
physical_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
#neutron/plugins/ml2/drivers/type_vxlan.py
class VxlanAllocation(BASEV2):
__tablename__ = 'ml2_vxlan_allocations'
vxlan_vni = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sa.sql.false())
#neutron/plugins/ml2/drivers/type_vxlan.py
class VxlanEndpoints(BASEV2):
__tablename__ = 'ml2_vxlan_endpoints'
ip_address = sa.Column(sa.String(64), primary_key=True)
udp_port = sa.Column(sa.Integer, primary_key=True, nullable=False,
autoincrement=False)
#neutron/plugins/ml2/models.py
class NetworkSegment(BASEV2, HasId):
__tablename__ = 'ml2_network_segments'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
nullable=False)
network_type = sa.Column(sa.String(32), nullable=False)
physical_network = sa.Column(sa.String(64))
segmentation_id = sa.Column(sa.Integer)
#neutron/plugins/ml2/models.py
class PortBinding(BASEV2):
__tablename__ = 'ml2_port_bindings'
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
host = sa.Column(sa.String(255), nullable=False, default='',
server_default='')
vnic_type = sa.Column(sa.String(64), nullable=False,
default=VNIC_NORMAL, server_default=VNIC_NORMAL)
profile = sa.Column(sa.String(BINDING_PROFILE_LEN), nullable=False,
default='', server_default='')
vif_type = sa.Column(sa.String(64), nullable=False)
vif_details = sa.Column(sa.String(4095), nullable=False, default='',
server_default='')
driver = sa.Column(sa.String(64))
segment = sa.Column(sa.String(36),
sa.ForeignKey('ml2_network_segments.id',
ondelete="SET NULL"))
port = orm.relationship(
Port,
backref=orm.backref("port_binding",
lazy='joined', uselist=False,
cascade='delete'))
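# A note on the relationship pattern above (it recurs throughout this file):
# orm.backref adds the reverse accessor on the related model (here
# Port.port_binding), lazy='joined' eager-loads the binding together with the
# port in one JOINed query, uselist=False makes the pair one-to-one, and
# cascade='delete' removes the binding row when its parent row is deleted.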
#neutron/plugins/mlnx/db/mlnx_models_v2.py
class SegmentationIdAllocation(BASEV2):
__tablename__ = 'segmentation_id_allocation'
physical_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
segmentation_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sa.sql.false())
#neutron/plugins/mlnx/db/mlnx_models_v2.py
#class was renamed from NetworkBinding to MlnxNetworkBinding
class MlnxNetworkBinding(BASEV2):
__tablename__ = 'mlnx_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
network_type = sa.Column(sa.String(32), nullable=False)
physical_network = sa.Column(sa.String(64))
segmentation_id = sa.Column(sa.Integer, nullable=False)
#neutron/plugins/mlnx/db/mlnx_models_v2.py
class PortProfileBinding(BASEV2):
__tablename__ = 'port_profile'
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
vnic_type = sa.Column(sa.String(32), nullable=False)
#neutron/plugins/nec/db/models.py
class OFCId(object):
ofc_id = sa.Column(sa.String(255), unique=True, nullable=False)
#neutron/plugins/nec/db/models.py
class NeutronId(object):
neutron_id = sa.Column(sa.String(36), primary_key=True)
#neutron/plugins/nec/db/models.py
class OFCTenantMapping(BASEV2, NeutronId, OFCId):
"""Represents a Tenant on OpenFlow Network/Controller."""
#neutron/plugins/nec/db/models.py
class OFCNetworkMapping(BASEV2, NeutronId, OFCId):
"""Represents a Network on OpenFlow Network/Controller."""
#neutron/plugins/nec/db/models.py
class OFCPortMapping(BASEV2, NeutronId, OFCId):
"""Represents a Port on OpenFlow Network/Controller."""
#neutron/plugins/nec/db/models.py
class OFCRouterMapping(BASEV2, NeutronId, OFCId):
"""Represents a router on OpenFlow Network/Controller."""
#neutron/plugins/nec/db/models.py
class OFCFilterMapping(BASEV2, NeutronId, OFCId):
"""Represents a Filter on OpenFlow Network/Controller."""
#neutron/plugins/nec/db/models.py
class PortInfo(BASEV2):
id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
datapath_id = sa.Column(sa.String(36), nullable=False)
port_no = sa.Column(sa.Integer, nullable=False)
vlan_id = sa.Column(sa.Integer, nullable=False)
mac = sa.Column(sa.String(32), nullable=False)
port = orm.relationship(
Port,
backref=orm.backref("portinfo",
lazy='joined', uselist=False,
cascade='delete'))
#neutron/plugins/nec/db/packetfilter.py
class PacketFilter(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
nullable=False)
priority = sa.Column(sa.Integer, nullable=False)
action = sa.Column(sa.String(16), nullable=False)
in_port = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
nullable=True)
src_mac = sa.Column(sa.String(32), nullable=False)
dst_mac = sa.Column(sa.String(32), nullable=False)
eth_type = sa.Column(sa.Integer, nullable=False)
src_cidr = sa.Column(sa.String(64), nullable=False)
dst_cidr = sa.Column(sa.String(64), nullable=False)
protocol = sa.Column(sa.String(16), nullable=False)
src_port = sa.Column(sa.Integer, nullable=False)
dst_port = sa.Column(sa.Integer, nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
status = sa.Column(sa.String(16), nullable=False)
network = orm.relationship(
Network,
backref=orm.backref('packetfilters', lazy='joined', cascade='delete'),
uselist=False)
in_port_ref = orm.relationship(
Port,
backref=orm.backref('packetfilters', lazy='joined', cascade='delete'),
primaryjoin="Port.id==PacketFilter.in_port",
uselist=False)
#neutron/plugins/nec/db/router.py
class RouterProvider(BASEV2):
provider = sa.Column(sa.String(255))
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
router = orm.relationship(Router, uselist=False,
backref=orm.backref('provider', uselist=False,
lazy='joined',
cascade='delete'))
#neutron/plugins/nuage/nuage_models.py
class NetPartition(BASEV2, HasId):
__tablename__ = 'nuage_net_partitions'
name = sa.Column(sa.String(64))
l3dom_tmplt_id = sa.Column(sa.String(36))
l2dom_tmplt_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class NetPartitionRouter(BASEV2):
__tablename__ = "nuage_net_partition_router_mapping"
net_partition_id = sa.Column(sa.String(36),
sa.ForeignKey('nuage_net_partitions.id',
ondelete="CASCADE"),
primary_key=True)
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
nuage_router_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class RouterZone(BASEV2):
__tablename__ = "nuage_router_zone_mapping"
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
nuage_zone_id = sa.Column(sa.String(36))
nuage_user_id = sa.Column(sa.String(36))
nuage_group_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class SubnetL2Domain(BASEV2):
__tablename__ = 'nuage_subnet_l2dom_mapping'
subnet_id = sa.Column(sa.String(36),
sa.ForeignKey('subnets.id', ondelete="CASCADE"),
primary_key=True)
net_partition_id = sa.Column(sa.String(36),
sa.ForeignKey('nuage_net_partitions.id',
ondelete="CASCADE"))
nuage_subnet_id = sa.Column(sa.String(36))
nuage_l2dom_tmplt_id = sa.Column(sa.String(36))
nuage_user_id = sa.Column(sa.String(36))
nuage_group_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class PortVPortMapping(BASEV2):
__tablename__ = 'nuage_port_mapping'
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
nuage_vport_id = sa.Column(sa.String(36))
nuage_vif_id = sa.Column(sa.String(36))
static_ip = sa.Column(sa.Boolean())
#neutron/plugins/nuage/nuage_models.py
class RouterRoutesMapping(BASEV2, Route):
__tablename__ = 'nuage_routerroutes_mapping'
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id',
ondelete="CASCADE"),
primary_key=True,
nullable=False)
nuage_route_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class FloatingIPPoolMapping(BASEV2):
__tablename__ = "nuage_floatingip_pool_mapping"
fip_pool_id = sa.Column(sa.String(36), primary_key=True)
net_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"))
router_id = sa.Column(sa.String(36))
#neutron/plugins/nuage/nuage_models.py
class FloatingIPMapping(BASEV2):
__tablename__ = 'nuage_floatingip_mapping'
fip_id = sa.Column(sa.String(36),
sa.ForeignKey('floatingips.id',
ondelete="CASCADE"),
primary_key=True)
router_id = sa.Column(sa.String(36))
nuage_fip_id = sa.Column(sa.String(36))
#neutron/plugins/openvswitch/ovs_models_v2.py
#class was renamed from VlanAllocation to OvsVlanAllocation
class OvsVlanAllocation(BASEV2):
__tablename__ = 'ovs_vlan_allocations'
physical_network = sa.Column(sa.String(64),
nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
#neutron/plugins/openvswitch/ovs_models_v2.py
class TunnelAllocation(BASEV2):
__tablename__ = 'ovs_tunnel_allocations'
tunnel_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
#neutron/plugins/openvswitch/ovs_models_v2.py
#class was renamed from NetworkBinding to OvsNetworkBinding
class OvsNetworkBinding(BASEV2):
__tablename__ = 'ovs_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
# 'gre', 'vlan', 'flat', 'local'
network_type = sa.Column(sa.String(32), nullable=False)
physical_network = sa.Column(sa.String(64))
segmentation_id = sa.Column(sa.Integer) # tunnel_id or vlan_id
network = orm.relationship(
Network,
backref=orm.backref("binding", lazy='joined',
uselist=False, cascade='delete'))
#neutron/plugins/openvswitch/ovs_models_v2.py
class TunnelEndpoint(BASEV2):
__tablename__ = 'ovs_tunnel_endpoints'
__table_args__ = (
schema.UniqueConstraint('id', name='uniq_ovs_tunnel_endpoints0id'),
BASEV2.__table_args__,
)
ip_address = sa.Column(sa.String(64), primary_key=True)
id = sa.Column(sa.Integer, nullable=False)
#neutron/plugins/ryu/db/models_v2.py
class TunnelKeyLast(BASEV2):
last_key = sa.Column(sa.Integer, primary_key=True)
#neutron/plugins/ryu/db/models_v2.py
class TunnelKey(BASEV2):
network_id = sa.Column(sa.String(36), sa.ForeignKey("networks.id"),
nullable=False)
tunnel_key = sa.Column(sa.Integer, primary_key=True,
nullable=False, autoincrement=False)
#neutron/plugins/vmware/dbexts/lsn_db.py
class LsnPort(BASEV2):
__tablename__ = 'lsn_port'
lsn_port_id = sa.Column(sa.String(36), primary_key=True)
lsn_id = sa.Column(sa.String(36), sa.ForeignKey('lsn.lsn_id',
ondelete="CASCADE"),
nullable=False)
sub_id = sa.Column(sa.String(36), nullable=False, unique=True)
mac_addr = sa.Column(sa.String(32), nullable=False, unique=True)
#neutron/plugins/vmware/dbexts/lsn_db.py
class Lsn(BASEV2):
__tablename__ = 'lsn'
lsn_id = sa.Column(sa.String(36), primary_key=True)
net_id = sa.Column(sa.String(36), nullable=False)
#neutron/plugins/vmware/dbexts/maclearning.py
class MacLearningState(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
mac_learning_enabled = sa.Column(sa.Boolean(), nullable=False)
port = orm.relationship(
Port,
backref=orm.backref("mac_learning_state", lazy='joined',
uselist=False, cascade='delete'))
#neutron/plugins/vmware/dbexts/models.py
class TzNetworkBinding(BASEV2):
__tablename__ = 'tz_network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
binding_type = sa.Column(sa.Enum('flat', 'vlan', 'stt', 'gre', 'l3_ext',
name='tz_network_bindings_binding_type'),
nullable=False, primary_key=True)
phy_uuid = sa.Column(sa.String(36), primary_key=True, nullable=True)
vlan_id = sa.Column(sa.Integer, primary_key=True, nullable=True,
autoincrement=False)
#neutron/plugins/vmware/dbexts/models.py
class NeutronNsxNetworkMapping(BASEV2):
__tablename__ = 'neutron_nsx_network_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete='CASCADE'),
primary_key=True)
nsx_id = sa.Column(sa.String(36), primary_key=True)
#neutron/plugins/vmware/dbexts/models.py
class NeutronNsxSecurityGroupMapping(BASEV2):
__tablename__ = 'neutron_nsx_security_group_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('securitygroups.id',
ondelete="CASCADE"),
primary_key=True)
nsx_id = sa.Column(sa.String(36), primary_key=True)
#neutron/plugins/vmware/dbexts/models.py
class NeutronNsxPortMapping(BASEV2):
__tablename__ = 'neutron_nsx_port_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete="CASCADE"),
primary_key=True)
nsx_switch_id = sa.Column(sa.String(36))
nsx_port_id = sa.Column(sa.String(36), nullable=False)
#neutron/plugins/vmware/dbexts/models.py
class NeutronNsxRouterMapping(BASEV2):
__tablename__ = 'neutron_nsx_router_mappings'
neutron_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete='CASCADE'),
primary_key=True)
nsx_id = sa.Column(sa.String(36))
#neutron/plugins/vmware/dbexts/models.py
class MultiProviderNetworks(BASEV2):
__tablename__ = 'multi_provider_networks'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
#neutron/plugins/vmware/dbexts/models.py
class NSXRouterExtAttributes(BASEV2):
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
distributed = sa.Column(sa.Boolean, default=False,
server_default=sa.sql.false(), nullable=False)
service_router = sa.Column(sa.Boolean, default=False,
server_default=sa.sql.false(), nullable=False)
router = orm.relationship(
Router,
backref=orm.backref("nsx_attributes", lazy='joined',
uselist=False, cascade='delete'))
#neutron/plugins/vmware/dbexts/networkgw_db.py
class NetworkConnection(BASEV2, HasTenant):
network_gateway_id = sa.Column(sa.String(36),
sa.ForeignKey('networkgateways.id',
ondelete='CASCADE'))
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete='CASCADE'))
segmentation_type = sa.Column(
sa.Enum('flat', 'vlan',
name='networkconnections_segmentation_type'))
segmentation_id = sa.Column(sa.Integer)
__table_args__ = (sa.UniqueConstraint(network_gateway_id,
segmentation_type,
segmentation_id),)
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete='CASCADE'),
primary_key=True)
#neutron/plugins/vmware/dbexts/networkgw_db.py
class NetworkGatewayDeviceReference(BASEV2):
id = sa.Column(sa.String(36), primary_key=True)
network_gateway_id = sa.Column(sa.String(36),
sa.ForeignKey('networkgateways.id',
ondelete='CASCADE'),
primary_key=True)
interface_name = sa.Column(sa.String(64), primary_key=True)
#neutron/plugins/vmware/dbexts/networkgw_db.py
class NetworkGatewayDevice(BASEV2, HasId,
HasTenant):
nsx_id = sa.Column(sa.String(36))
# Optional name for the gateway device
name = sa.Column(sa.String(255))
    # Transport connector type. Not using enum, as the range of
    # connector types might vary with the backend version
connector_type = sa.Column(sa.String(10))
# Transport connector IP Address
connector_ip = sa.Column(sa.String(64))
# operational status
status = sa.Column(sa.String(16))
#neutron/plugins/vmware/dbexts/networkgw_db.py
class NetworkGateway(BASEV2, HasId,
HasTenant):
name = sa.Column(sa.String(255))
# Tenant id is nullable for this resource
tenant_id = sa.Column(sa.String(36))
default = sa.Column(sa.Boolean())
devices = orm.relationship(NetworkGatewayDeviceReference,
backref='networkgateways',
cascade='all,delete')
network_connections = orm.relationship(NetworkConnection, lazy='joined')
#neutron/plugins/vmware/dbexts/qos_db.py
class QoSQueue(BASEV2, HasId, HasTenant):
name = sa.Column(sa.String(255))
default = sa.Column(sa.Boolean, default=False,
server_default=sa.sql.false())
min = sa.Column(sa.Integer, nullable=False)
max = sa.Column(sa.Integer, nullable=True)
qos_marking = sa.Column(sa.Enum('untrusted', 'trusted',
name='qosqueues_qos_marking'))
dscp = sa.Column(sa.Integer)
#neutron/plugins/vmware/dbexts/qos_db.py
class PortQueueMapping(BASEV2):
port_id = sa.Column(sa.String(36),
sa.ForeignKey("ports.id", ondelete="CASCADE"),
primary_key=True)
queue_id = sa.Column(sa.String(36), sa.ForeignKey("qosqueues.id"),
primary_key=True)
    # Add a relationship to the Port model, adding a backref which will
    # allow SQLAlchemy to eagerly load the queue binding
port = orm.relationship(
Port,
backref=orm.backref("qos_queue", uselist=False,
cascade='delete', lazy='joined'))
#neutron/plugins/vmware/dbexts/qos_db.py
class NetworkQueueMapping(BASEV2):
network_id = sa.Column(sa.String(36),
sa.ForeignKey("networks.id", ondelete="CASCADE"),
primary_key=True)
queue_id = sa.Column(sa.String(36), sa.ForeignKey("qosqueues.id",
ondelete="CASCADE"))
    # Add a relationship to the Network model, adding a backref which will
    # allow SQLAlchemy to eagerly load the queue binding
network = orm.relationship(
Network,
backref=orm.backref("qos_queue", uselist=False,
cascade='delete', lazy='joined'))
#neutron/plugins/vmware/dbexts/vcns_models.py
class VcnsRouterBinding(BASEV2, HasStatusDescription):
__tablename__ = 'vcns_router_bindings'
# no sa.ForeignKey to routers.id because for now, a router can be removed
# from routers when delete_router is executed, but the binding is only
# removed after the Edge is deleted
router_id = sa.Column(sa.String(36),
primary_key=True)
edge_id = sa.Column(sa.String(16),
nullable=True)
lswitch_id = sa.Column(sa.String(36),
nullable=False)
#neutron/plugins/vmware/dbexts/vcns_models.py
class VcnsEdgeFirewallRuleBinding(BASEV2):
__tablename__ = 'vcns_firewall_rule_bindings'
rule_id = sa.Column(sa.String(36),
sa.ForeignKey("firewall_rules.id"),
primary_key=True)
edge_id = sa.Column(sa.String(36), primary_key=True)
rule_vseid = sa.Column(sa.String(36))
#neutron/plugins/vmware/dbexts/vcns_models.py
class VcnsEdgePoolBinding(BASEV2):
__tablename__ = 'vcns_edge_pool_bindings'
pool_id = sa.Column(sa.String(36),
sa.ForeignKey("pools.id", ondelete="CASCADE"),
primary_key=True)
edge_id = sa.Column(sa.String(36), primary_key=True)
pool_vseid = sa.Column(sa.String(36))
#neutron/plugins/vmware/dbexts/vcns_models.py
class VcnsEdgeVipBinding(BASEV2):
__tablename__ = 'vcns_edge_vip_bindings'
vip_id = sa.Column(sa.String(36),
sa.ForeignKey("vips.id", ondelete="CASCADE"),
primary_key=True)
edge_id = sa.Column(sa.String(36))
vip_vseid = sa.Column(sa.String(36))
app_profileid = sa.Column(sa.String(36))
#neutron/plugins/vmware/dbexts/vcns_models.py
class VcnsEdgeMonitorBinding(BASEV2):
__tablename__ = 'vcns_edge_monitor_bindings'
monitor_id = sa.Column(sa.String(36),
sa.ForeignKey("healthmonitors.id",
ondelete="CASCADE"),
primary_key=True)
edge_id = sa.Column(sa.String(36), primary_key=True)
monitor_vseid = sa.Column(sa.String(36))
#neutron/services/loadbalancer/agent_scheduler.py
class PoolLoadbalancerAgentBinding(BASEV2):
pool_id = sa.Column(sa.String(36),
sa.ForeignKey("pools.id", ondelete='CASCADE'),
primary_key=True)
agent = orm.relation(Agent)
agent_id = sa.Column(sa.String(36), sa.ForeignKey("agents.id",
ondelete='CASCADE'),
nullable=False)
#neutron/services/loadbalancer/drivers/embrane/models.py
class PoolPort(BASEV2):
__tablename__ = 'embrane_pool_port'
pool_id = sa.Column(sa.String(36), sa.ForeignKey('pools.id'),
primary_key=True)
port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'),
nullable=False)
#neutron/services/vpn/service_drivers/cisco_csr_db.py
class IdentifierMap(BASEV2, HasTenant):
__tablename__ = 'cisco_csr_identifier_map'
ipsec_site_conn_id = sa.Column(sa.String(64),
sa.ForeignKey('ipsec_site_connections.id',
ondelete="CASCADE"),
primary_key=True)
csr_tunnel_id = sa.Column(sa.Integer, nullable=False)
csr_ike_policy_id = sa.Column(sa.Integer, nullable=False)
csr_ipsec_policy_id = sa.Column(sa.Integer, nullable=False)
def get_metadata():
return BASEV2.metadata
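# Illustration only (not part of Neutron): each declarative class above maps
# one-to-one onto a CREATE TABLE statement. A self-contained sketch of the
# same pattern, using hypothetical names:
#
#     import sqlalchemy as sa
#     from sqlalchemy.ext.declarative import declarative_base
#
#     Base = declarative_base()
#
#     class ExampleNetworkBinding(Base):
#         __tablename__ = 'example_network_bindings'
#         network_id = sa.Column(sa.String(36), primary_key=True)
#         vlan_id = sa.Column(sa.Integer, nullable=False)
#
#     engine = sa.create_engine('sqlite://')
#     Base.metadata.create_all(engine)  # emits the CREATE TABLE statements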
| apache-2.0 | -8,167,522,142,285,659,000 | 38.311449 | 78 | 0.595727 | false |
walteryang47/ovirt-engine | packaging/setup/plugins/ovirt-engine-setup/ovirt-engine-common/distro-rpm/packages.py | 6 | 17845 | #
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2013-2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Package upgrade plugin.
"""
import datetime
import gettext
import os
import platform
from otopi import constants as otopicons
from otopi import plugin, transaction, util
from ovirt_engine_setup import constants as osetupcons
from ovirt_setup_lib import dialog
from ovirt_engine_setup import util as osetuputil
def _(m):
return gettext.dgettext(message=m, domain='ovirt-engine-setup')
@util.export
class Plugin(plugin.PluginBase):
"""
Package upgrade plugin.
"""
class VersionLockTransaction(transaction.TransactionElement):
"""
        Version lock transaction element.
        This is not a real transaction, but we need to
        rollback/commit the same way the packager does.
        We cannot actually prepare the transaction at the preparation
        stage, because the new packages are not installed yet.
        But we must restore the file, as we do not know which packages
        were locked at the previous version.
"""
_VERSIONLOCK_LIST_FILES = (
osetupcons.FileLocations.OVIRT_ENGINE_YUM_VERSIONLOCK,
osetupcons.FileLocations.OVIRT_ENGINE_DNF_VERSIONLOCK,
)
def _filterVersionLock(self):
modified = {}
content = {}
for versionlock_list_file in self._VERSIONLOCK_LIST_FILES:
modified[versionlock_list_file] = False
content[versionlock_list_file] = []
if os.path.exists(versionlock_list_file):
with open(versionlock_list_file, 'r') as f:
for line in f.read().splitlines():
found = False
for pattern in self.environment[
osetupcons.RPMDistroEnv.VERSION_LOCK_FILTER
]:
if line.find(pattern) != -1:
found = True
break
if not found:
content[versionlock_list_file].append(line)
else:
modified[versionlock_list_file] = True
return (modified, content)
@property
def environment(self):
return self._parent.environment
def __init__(self, parent):
self._parent = parent
self._backup = {}
def __str__(self):
return _("Version Lock Transaction")
def prepare(self):
if not self._parent._enabled:
return
modified, content = self._filterVersionLock()
for versionlock_list_file in self._VERSIONLOCK_LIST_FILES:
if modified[versionlock_list_file]:
self._backup[versionlock_list_file] = '%s.%s' % (
versionlock_list_file,
datetime.datetime.now().strftime('%Y%m%d%H%M%S'),
)
os.rename(
versionlock_list_file,
self._backup[versionlock_list_file],
)
with open(
versionlock_list_file,
'w'
) as f:
f.write(
'\n'.join(content[versionlock_list_file]) + '\n'
)
def abort(self):
for versionlock_list_file in self._VERSIONLOCK_LIST_FILES:
if (
versionlock_list_file in self._backup and
os.path.exists(self._backup[versionlock_list_file])
):
os.rename(
self._backup[versionlock_list_file],
versionlock_list_file,
)
def commit(self):
            # This must always be executed so we can be sure we
            # are locked.
            # Execute rpm directly, as yum does not handle offline usage well.
if self.environment[osetupcons.RPMDistroEnv.VERSION_LOCK_APPLY]:
rc, out, err = self._parent.execute(
args=(
self._parent.command.get('rpm'),
'-q',
) + tuple(
set(
self.environment[
osetupcons.RPMDistroEnv.VERSION_LOCK_APPLY
]
)
),
)
changes = []
for line in out:
changes.append(
{
'added': line,
}
)
versionlock_uninstall_group = self.environment[
osetupcons.CoreEnv.REGISTER_UNINSTALL_GROUPS
].createGroup(
group='versionlock',
description='YUM version locking configuration',
optional=False
)
modified, content = self._filterVersionLock()
for versionlock_list_file in self._VERSIONLOCK_LIST_FILES:
self.environment[
osetupcons.CoreEnv.UNINSTALL_UNREMOVABLE_FILES
].append(versionlock_list_file)
if os.path.exists(versionlock_list_file):
versionlock_uninstall_group.addChanges(
'versionlock',
versionlock_list_file,
changes,
)
content[versionlock_list_file].extend(out)
with open(
versionlock_list_file,
'w',
) as f:
f.write(
'\n'.join(
content[versionlock_list_file]
) + '\n'
)
def _getSink(self):
pm = self._PM
class MyPMSink(self._MiniPMSinkBase):
def __init__(self, log):
super(MyPMSink, self).__init__()
self._log = log
def verbose(self, msg):
super(MyPMSink, self).verbose(msg)
self._log.debug('%s %s', pm, msg)
def info(self, msg):
super(MyPMSink, self).info(msg)
self._log.info('%s %s', pm, msg)
def error(self, msg):
super(MyPMSink, self).error(msg)
self._log.error('%s %s', pm, msg)
return MyPMSink(self.logger)
def _checkForPackagesUpdate(self, packages):
update = []
mpm = self._MiniPM(
sink=self._getSink(),
disabledPlugins=('versionlock',),
)
for package in packages:
with mpm.transaction():
mpm.update(packages=(package,))
if mpm.buildTransaction():
if mpm.queryTransaction():
update.append(package)
return update
def _checkForProductUpdate(self):
# TODO: otopi is now providing minidnf too
missingRollback = []
upgradeAvailable = False
mpm = self._MiniPM(
sink=self._getSink(),
disabledPlugins=('versionlock',),
)
plist = []
with mpm.transaction():
groups = [group['name'] for group in mpm.queryGroups()]
for entry in self.environment[
osetupcons.RPMDistroEnv.PACKAGES_UPGRADE_LIST
]:
if 'group' in entry and entry['group'] in groups:
mpm.updateGroup(group=entry['group'])
else:
mpm.installUpdate(packages=entry['packages'])
if mpm.buildTransaction():
upgradeAvailable = True
for p in mpm.queryTransaction():
self.logger.debug('PACKAGE: [%s] %s' % (
p['operation'],
p['display_name']
))
plist.append(
_(
'PACKAGE: [{operation}] {display_name}'
).format(
operation=p['operation'],
display_name=p['display_name']
)
)
        # Verify that all installed packages are available in yum
for package in mpm.queryTransaction():
installed = False
reinstall_available = False
for query in mpm.queryPackages(
patterns=(package['display_name'],),
showdups=True,
):
self.logger.debug(
'dupes: operation [%s] package %s' % (
query['operation'],
query['display_name'],
)
)
if query['operation'] == 'installed':
installed = True
if query['operation'] == 'reinstall_available':
reinstall_available = True
if installed and not reinstall_available:
missingRollback.append(package['display_name'])
return (upgradeAvailable, set(missingRollback), plist)
def __init__(self, context):
super(Plugin, self).__init__(context=context)
self._shouldResultVersionLock = False
self._enabled = False
self._distribution = platform.linux_distribution(
full_distribution_name=0
)[0]
@plugin.event(
stage=plugin.Stages.STAGE_INIT,
)
def _init(self):
self.environment.setdefault(
osetupcons.RPMDistroEnv.ENABLE_UPGRADE,
None
)
self.environment.setdefault(
osetupcons.RPMDistroEnv.REQUIRE_ROLLBACK,
None
)
self.environment.setdefault(
osetupcons.RPMDistroEnv.VERSION_LOCK_APPLY,
[]
)
self.environment.setdefault(
osetupcons.RPMDistroEnv.VERSION_LOCK_FILTER,
[]
)
self.environment[
osetupcons.RPMDistroEnv.PACKAGES_UPGRADE_LIST
] = []
self.environment[
osetupcons.RPMDistroEnv.PACKAGES_SETUP
] = []
@plugin.event(
stage=plugin.Stages.STAGE_SETUP,
condition=lambda self: (
not self.environment[
osetupcons.CoreEnv.DEVELOPER_MODE
] and
self._distribution in ('redhat', 'fedora', 'centos')
),
)
def _setup(self):
self.command.detect('rpm')
self.environment[otopicons.CoreEnv.MAIN_TRANSACTION].append(
self.VersionLockTransaction(
parent=self,
)
)
if not self.environment[
osetupcons.CoreEnv.OFFLINE_PACKAGER
]:
self._PM, self._MiniPM, self._MiniPMSinkBase = (
osetuputil.getPackageManager(self.logger)
)
self._enabled = True
@plugin.event(
stage=plugin.Stages.STAGE_CUSTOMIZATION,
name=osetupcons.Stages.DISTRO_RPM_PACKAGE_UPDATE_CHECK,
before=(
osetupcons.Stages.DIALOG_TITLES_E_PACKAGES,
),
after=(
osetupcons.Stages.DIALOG_TITLES_S_PACKAGES,
),
condition=lambda self: self._enabled,
)
def _customization(self):
# assume we have nothing to do
self._enabled = False
upgradeAvailable = None
missingRollback = None
if self.environment[osetupcons.RPMDistroEnv.ENABLE_UPGRADE] is None:
self.logger.info(_('Checking for product updates...'))
(
upgradeAvailable,
missingRollback,
plist,
) = self._checkForProductUpdate()
if not upgradeAvailable:
self.logger.info(_('No product updates found'))
else:
self.environment[
osetupcons.RPMDistroEnv.ENABLE_UPGRADE
] = dialog.queryBoolean(
dialog=self.dialog,
name='OVESETUP_RPMDISTRO_PACKAGE_UPGRADE',
note=_(
'Setup has found updates for some packages:\n'
'{plist}\n'
                        'Do you wish to update them now? '
'(@VALUES@) [@DEFAULT@]: '
).format(
plist='\n'.join(plist)
),
prompt=True,
true=_('Yes'),
false=_('No'),
default=True,
)
if self.environment[osetupcons.RPMDistroEnv.ENABLE_UPGRADE]:
self.logger.info(_('Checking for an update for Setup...'))
update = self._checkForPackagesUpdate(
packages=self.environment[
osetupcons.RPMDistroEnv.PACKAGES_SETUP
],
)
if update:
self.dialog.note(
text=_(
                        'An update for the Setup packages {packages} was '
                        'found. Please update these packages by running:\n'
'"{pm} update {packages}"\nand then execute Setup '
'again.'
).format(
pm=self._PM.lower(),
packages=' '.join(update),
),
)
raise RuntimeError(_('Please update the Setup packages'))
if upgradeAvailable is None:
(
upgradeAvailable,
missingRollback,
plist,
) = self._checkForProductUpdate()
if not upgradeAvailable:
self.dialog.note(text=_('No update for Setup found'))
else:
if missingRollback:
if self.environment[
osetupcons.RPMDistroEnv.REQUIRE_ROLLBACK
] is None:
self.environment[
osetupcons.RPMDistroEnv.REQUIRE_ROLLBACK
] = dialog.queryBoolean(
dialog=self.dialog,
name='OVESETUP_RPMDISTRO_REQUIRE_ROLLBACK',
note=_(
                            'Setup will not be able to roll back new '
'packages in case of a failure, because '
'the following installed packages were not '
'found in enabled repositories:\n\n'
'{missingRollback}\n'
'Do you want to abort Setup? '
'(@VALUES@) [@DEFAULT@]: '
).format(
missingRollback='\n'.join(
list(missingRollback)
),
),
prompt=True,
true=_('Yes'),
false=_('No'),
default=True,
)
if self.environment[
osetupcons.RPMDistroEnv.REQUIRE_ROLLBACK
]:
raise RuntimeError(
_('Package rollback information is unavailable')
)
#
# Disable yum rollback on transaction failure
# as rhel yum will remove packages that were updated
# without installing previous ones.
#
self.environment[
otopicons.PackEnv.YUM_ROLLBACK
] = False
self._enabled = self.environment[
osetupcons.RPMDistroEnv.ENABLE_UPGRADE
]
if not self._enabled and upgradeAvailable:
raise RuntimeError(
_('Aborted, packages must be updated')
)
@plugin.event(
stage=plugin.Stages.STAGE_PACKAGES,
condition=lambda self: self._enabled,
)
def packages(self):
groups = [group['name'] for group in self.packager.queryGroups()]
for entry in self.environment[
osetupcons.RPMDistroEnv.PACKAGES_UPGRADE_LIST
]:
if 'group' in entry and entry['group'] in groups:
self.packager.updateGroup(group=entry['group'])
else:
self.packager.installUpdate(packages=entry['packages'])
# vim: expandtab tabstop=4 shiftwidth=4
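# Illustration only (not part of the plugin): the heart of
# VersionLockTransaction._filterVersionLock above is a simple filtering step.
# A standalone sketch, with hypothetical lock entries and filter patterns:
#
#     def filter_locked(lines, patterns):
#         kept = [l for l in lines if not any(p in l for p in patterns)]
#         return len(kept) != len(lines), kept
#
#     filter_locked(
#         ['ovirt-engine-0:4.0.0-1.el7.*', 'bash-0:4.2.46-12.el7.*'],
#         ['ovirt-engine'],
#     )
#     # -> (True, ['bash-0:4.2.46-12.el7.*'])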
| apache-2.0 | -3,213,299,727,158,135,000 | 35.567623 | 76 | 0.467974 | false |
zxjzxj9/FlaskBoard | web/lib/python2.7/site-packages/psycopg2/tests/testconfig.py | 10 | 1249 | # Configure the test suite from the env variables.
import os
dbname = os.environ.get('PSYCOPG2_TESTDB', 'psycopg2_test')
dbhost = os.environ.get('PSYCOPG2_TESTDB_HOST', None)
dbport = os.environ.get('PSYCOPG2_TESTDB_PORT', None)
dbuser = os.environ.get('PSYCOPG2_TESTDB_USER', None)
dbpass = os.environ.get('PSYCOPG2_TESTDB_PASSWORD', None)
repl_dsn = os.environ.get('PSYCOPG2_TEST_REPL_DSN',
"dbname=psycopg2_test replication=1")
# Check if we want to test psycopg's green path.
green = os.environ.get('PSYCOPG2_TEST_GREEN', None)
if green:
if green == '1':
from psycopg2.extras import wait_select as wait_callback
elif green == 'eventlet':
from eventlet.support.psycopg2_patcher import eventlet_wait_callback \
as wait_callback
else:
raise ValueError("please set 'PSYCOPG2_TEST_GREEN' to a valid value")
import psycopg2.extensions
psycopg2.extensions.set_wait_callback(wait_callback)
# Construct a DSN to connect to the test database:
dsn = 'dbname=%s' % dbname
if dbhost is not None:
dsn += ' host=%s' % dbhost
if dbport is not None:
dsn += ' port=%s' % dbport
if dbuser is not None:
dsn += ' user=%s' % dbuser
if dbpass is not None:
dsn += ' password=%s' % dbpass
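# For illustration (hypothetical values): with PSYCOPG2_TESTDB_HOST=localhost,
# PSYCOPG2_TESTDB_PORT=5432 and PSYCOPG2_TESTDB_USER=postgres exported, the
# assembly above yields:
#
#     dsn == 'dbname=psycopg2_test host=localhost port=5432 user=postgres'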
| apache-2.0 | 7,439,127,112,395,895,000 | 33.694444 | 78 | 0.690152 | false |
dayatz/taiga-back | taiga/users/gravatar.py | 1 | 1419 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# Copyright (C) 2014-2017 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
def get_gravatar_id(email: str) -> str:
"""Get the gravatar id associated to an email.
:return: Gravatar id.
"""
return hashlib.md5(email.lower().encode()).hexdigest()
def get_user_gravatar_id(user: object) -> str:
"""Get the gravatar id associated to a user.
:return: Gravatar id.
"""
if user and user.email:
return get_gravatar_id(user.email)
return None
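# Usage sketch (hypothetical address): the id is simply the hex MD5 digest of
# the lower-cased address, suitable for building avatar URLs.
#
#     gid = get_gravatar_id('[email protected]')
#     assert gid == hashlib.md5(b'[email protected]').hexdigest()
#     url = 'https://www.gravatar.com/avatar/%s' % gid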
| agpl-3.0 | -8,107,461,020,138,980,000 | 35.307692 | 74 | 0.721045 | false |
JakeBrand/CMPUT410-E6 | v1/lib/python2.7/site-packages/django/contrib/gis/db/backends/mysql/schema.py | 57 | 3048 | import logging
from django.contrib.gis.db.models.fields import GeometryField
from django.db.utils import OperationalError
from django.db.backends.mysql.schema import DatabaseSchemaEditor
logger = logging.getLogger('django.contrib.gis')
class MySQLGISSchemaEditor(DatabaseSchemaEditor):
sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)'
sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s'
def __init__(self, *args, **kwargs):
super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs)
self.geometry_sql = []
def skip_default(self, field):
return (
super(MySQLGISSchemaEditor, self).skip_default(field) or
# Geometry fields are stored as BLOB/TEXT and can't have defaults.
isinstance(field, GeometryField)
)
def column_sql(self, model, field, include_default=False):
column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default)
# MySQL doesn't support spatial indexes on NULL columns
if isinstance(field, GeometryField) and field.spatial_index and not field.null:
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
self.geometry_sql.append(
self.sql_add_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(db_table),
'column': qn(field.column),
}
)
return column_sql
def create_model(self, model):
super(MySQLGISSchemaEditor, self).create_model(model)
self.create_spatial_indexes()
def add_field(self, model, field):
super(MySQLGISSchemaEditor, self).add_field(model, field)
self.create_spatial_indexes()
def remove_field(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index:
qn = self.connection.ops.quote_name
sql = self.sql_drop_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(model._meta.db_table),
}
try:
self.execute(sql)
except OperationalError:
logger.error(
"Couldn't remove spatial index: %s (may be expected "
"if your storage engine doesn't support them)." % sql
)
super(MySQLGISSchemaEditor, self).remove_field(model, field)
def _create_spatial_index_name(self, model, field):
return '%s_%s_id' % (model._meta.db_table, field.column)
def create_spatial_indexes(self):
for sql in self.geometry_sql:
try:
self.execute(sql)
except OperationalError:
logger.error(
"Cannot create SPATIAL INDEX %s. Only MyISAM and (as of "
"MySQL 5.7.5) InnoDB support them." % sql
)
self.geometry_sql = []
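# For illustration (hypothetical model): a spatial-indexed, non-null
# GeometryField named ``geom`` on a model with db_table 'places' makes the
# templates above render roughly as:
#
#     CREATE SPATIAL INDEX `places_geom_id` ON `places`(`geom`)
#     DROP INDEX `places_geom_id` ON `places`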
| apache-2.0 | -6,236,837,184,736,648,000 | 38.584416 | 96 | 0.595472 | false |
aclevy/vcrpy | vcr/request.py | 3 | 3068 | from six import BytesIO, text_type
from six.moves.urllib.parse import urlparse, parse_qsl
class Request(object):
"""
VCR's representation of a request.
There is a weird quirk in HTTP. You can send the same header twice. For
this reason, headers are represented by a dict, with lists as the values.
However, it appears that HTTPlib is completely incapable of sending the
same header twice. This puts me in a weird position: I want to be able to
accurately represent HTTP headers in cassettes, but I don't want the extra
step of always having to do [0] in the general case, i.e.
request.headers['key'][0]
In addition, some servers sometimes send the same header more than once,
and httplib *can* deal with this situation.
    Furthermore, I wanted to keep the request and response cassette format as
similar as possible.
    For this reason, in cassettes I keep a dict with lists as values, but once
deserialized into VCR, I keep them as plain, naked dicts.
"""
def __init__(self, method, uri, body, headers):
self.method = method
self.uri = uri
self._was_file = hasattr(body, 'read')
if self._was_file:
self.body = body.read()
else:
self.body = body
self.headers = {}
for key in headers:
self.add_header(key, headers[key])
@property
def body(self):
return BytesIO(self._body) if self._was_file else self._body
@body.setter
def body(self, value):
if isinstance(value, text_type):
value = value.encode('utf-8')
self._body = value
def add_header(self, key, value):
# see class docstring for an explanation
if isinstance(value, (tuple, list)):
self.headers[key] = value[0]
else:
self.headers[key] = value
@property
def scheme(self):
return urlparse(self.uri).scheme
@property
def host(self):
return urlparse(self.uri).hostname
@property
def port(self):
parse_uri = urlparse(self.uri)
port = parse_uri.port
if port is None:
port = {'https': 443, 'http': 80}[parse_uri.scheme]
return port
@property
def path(self):
return urlparse(self.uri).path
@property
def query(self):
q = urlparse(self.uri).query
return sorted(parse_qsl(q))
# alias for backwards compatibility
@property
def url(self):
return self.uri
# alias for backwards compatibility
@property
def protocol(self):
return self.scheme
def __str__(self):
return "<Request ({0}) {1}>".format(self.method, self.uri)
def __repr__(self):
return self.__str__()
def _to_dict(self):
return {
'method': self.method,
'uri': self.uri,
'body': self.body,
'headers': dict(((k, [v]) for k, v in self.headers.items())),
}
@classmethod
def _from_dict(cls, dct):
return Request(**dct)
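# Usage sketch (URL and header values are made up):
#
#     req = Request('GET', 'https://example.com:8443/a/b?y=2&x=1', '',
#                   {'Accept': ['text/plain']})
#     req.host                    # 'example.com'
#     req.port                    # 8443
#     req.path                    # '/a/b'
#     req.query                   # [('x', '1'), ('y', '2')]  (sorted)
#     req.headers                 # {'Accept': 'text/plain'}  (list collapsed)
#     req._to_dict()['headers']   # {'Accept': ['text/plain']}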
| mit | 4,881,428,995,260,085,000 | 27.672897 | 78 | 0.602021 | false |
DailyActie/Surrogate-Model | 01-codes/scipy-master/scipy/special/lambertw.py | 33 | 3041 | from __future__ import division, print_function, absolute_import
from ._ufuncs import _lambertw
def lambertw(z, k=0, tol=1e-8):
r"""
lambertw(z, k=0, tol=1e-8)
Lambert W function.
The Lambert W function `W(z)` is defined as the inverse function
of ``w * exp(w)``. In other words, the value of ``W(z)`` is
such that ``z = W(z) * exp(W(z))`` for any complex number
``z``.
The Lambert W function is a multivalued function with infinitely
many branches. Each branch gives a separate solution of the
equation ``z = w exp(w)``. Here, the branches are indexed by the
integer `k`.
Parameters
----------
z : array_like
Input argument.
k : int, optional
Branch index.
tol : float, optional
Evaluation tolerance.
Returns
-------
w : array
`w` will have the same shape as `z`.
Notes
-----
All branches are supported by `lambertw`:
* ``lambertw(z)`` gives the principal solution (branch 0)
* ``lambertw(z, k)`` gives the solution on branch `k`
The Lambert W function has two partially real branches: the
principal branch (`k = 0`) is real for real ``z > -1/e``, and the
``k = -1`` branch is real for ``-1/e < z < 0``. All branches except
``k = 0`` have a logarithmic singularity at ``z = 0``.
**Possible issues**
The evaluation can become inaccurate very close to the branch point
at ``-1/e``. In some corner cases, `lambertw` might currently
fail to converge, or can end up on the wrong branch.
**Algorithm**
Halley's iteration is used to invert ``w * exp(w)``, using a first-order
    asymptotic approximation (`O(log(w))` or `O(w)`) as the initial estimate.
The definition, implementation and choice of branches is based on [2]_.
See Also
--------
wrightomega : the Wright Omega function
References
----------
.. [1] http://en.wikipedia.org/wiki/Lambert_W_function
.. [2] Corless et al, "On the Lambert W function", Adv. Comp. Math. 5
(1996) 329-359.
http://www.apmaths.uwo.ca/~djeffrey/Offprints/W-adv-cm.pdf
Examples
--------
The Lambert W function is the inverse of ``w exp(w)``:
>>> from scipy.special import lambertw
>>> w = lambertw(1)
>>> w
(0.56714329040978384+0j)
>>> w * np.exp(w)
(1.0+0j)
Any branch gives a valid inverse:
>>> w = lambertw(1, k=3)
>>> w
(-2.8535817554090377+17.113535539412148j)
>>> w*np.exp(w)
(1.0000000000000002+1.609823385706477e-15j)
**Applications to equation-solving**
The Lambert W function may be used to solve various kinds of
equations, such as finding the value of the infinite power
tower :math:`z^{z^{z^{\ldots}}}`:
>>> def tower(z, n):
... if n == 0:
... return z
... return z ** tower(z, n-1)
...
>>> tower(0.5, 100)
0.641185744504986
>>> -lambertw(-np.log(0.5)) / np.log(0.5)
(0.64118574450498589+0j)
"""
return _lambertw(z, k, tol)
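# Quick sanity check (illustrative, not part of the API): any returned branch
# value must satisfy the defining identity w * exp(w) == z.
#
#     >>> import numpy as np
#     >>> w = lambertw(2.0)
#     >>> bool(np.allclose(w * np.exp(w), 2.0))
#     True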
| mit | -2,097,089,116,538,274,300 | 27.420561 | 76 | 0.598159 | false |
MjnMixael/knossos | knossos/__main__.py | 1 | 3121 | #!/usr/bin/python
## Copyright 2017 Knossos authors, see NOTICE file
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from __future__ import absolute_import, print_function
import sys
if __package__ is None and not hasattr(sys, 'frozen'):
import os.path
path = os.path.realpath(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(path)))
if len(sys.argv) > 1 and sys.argv[1] == '--cpuinfo':
# We don't need to initialize knossos if we only need to fetch the CPU info.
import json
from knossos.third_party import cpuinfo
info = None
try:
info = cpuinfo.get_cpu_info()
except Exception:
from knossos.launcher import logging
logging.exception('Failed to retrieve CPU info.')
print(json.dumps(info))
elif len(sys.argv) > 1 and sys.argv[1] == '--run-cpuid':
from knossos.third_party import cpuinfo
print(cpuinfo._actual_get_cpu_info_from_cpuid())
elif len(sys.argv) > 1 and sys.argv[1] == '--deviceinfo':
import json
from knossos import clibs
clibs.init_sdl()
clibs.init_openal()
if clibs.can_detect_audio():
audio_devs = clibs.list_audio_devs()
else:
audio_devs = None
print(json.dumps({
'modes': clibs.get_modes(),
'audio_devs': audio_devs,
'joysticks': clibs.list_joysticks()
}))
elif len(sys.argv) > 1 and sys.argv[1] == '--fso-config-path':
from knossos import clibs
clibs.init_sdl()
print(clibs.get_config_path())
elif len(sys.argv) > 1 and sys.argv[1] == '--lib-paths':
import json
from knossos import clibs, center
if len(sys.argv) > 3:
if sys.argv[2] == 'auto':
center.settings['sdl2_path'] = None
else:
center.settings['sdl2_path'] = sys.argv[2]
if sys.argv[3] == 'auto':
center.settings['openal_path'] = None
else:
center.settings['openal_path'] = sys.argv[3]
try:
clibs.init_sdl()
except Exception:
clibs.sdl = None
try:
clibs.init_openal()
except Exception:
clibs.acl = None
if center.settings['sdl2_path'] and clibs.sdl:
if clibs.sdl._name != center.settings['sdl2_path']:
clibs.sdl = None
if center.settings['openal_path'] and clibs.alc:
if clibs.alc._name != center.settings['openal_path']:
clibs.alc = None
print(json.dumps({
'sdl2': clibs.sdl._name if clibs.sdl else None,
'openal': clibs.alc._name if clibs.alc else None
}))
else:
from knossos import launcher
launcher.main()
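# Example invocations (illustrative; each flag maps to a branch above):
#
#     python -m knossos --cpuinfo               # CPU info as JSON
#     python -m knossos --deviceinfo            # modes, audio devices, joysticks
#     python -m knossos --fso-config-path       # FSO configuration directory
#     python -m knossos --lib-paths auto auto   # resolved SDL2 / OpenAL paths
#     python -m knossos                         # normal launcher start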
| apache-2.0 | -3,226,768,737,397,734,000 | 28.72381 | 80 | 0.63249 | false |