code: stringlengths (2 to 1.05M)
repo_name: stringlengths (5 to 104)
path: stringlengths (4 to 251)
language: stringclasses (1 value)
license: stringclasses (15 values)
size: int32 (2 to 1.05M)
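Each record below pairs a Python source file (code) with its repository metadata (repo_name, path, language, license, size in bytes). A minimal sketch of iterating over such a dump with the Hugging Face `datasets` library follows; the JSON Lines file name `python_code_dump.jsonl` is a placeholder and the local-file loading path is an assumption, not part of this dump.

from datasets import load_dataset  # assumes the `datasets` package is installed

# Load the dump from a local JSON Lines file (placeholder file name).
rows = load_dataset("json", data_files="python_code_dump.jsonl", split="train")

# Each record carries the columns described in the header above.
for row in rows.select(range(3)):
    print(row["repo_name"], row["path"], row["license"], row["size"])
    print(row["code"][:80])  # first 80 characters of the source file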
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
import zipfile

import config
import store
import common
from db import db_session, Source
import crypto_util

# Set environment variable so config.py uses a test environment
os.environ['SECUREDROP_ENV'] = 'test'


class TestStore(unittest.TestCase):

    """The set of tests for store.py."""

    def setUp(self):
        common.shared_setup()

    def tearDown(self):
        common.shared_teardown()

    def test_verify(self):
        with self.assertRaises(store.PathException):
            store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
        with self.assertRaises(store.PathException):
            store.verify(config.STORE_DIR + "_backup")

    def test_get_zip(self):
        sid = 'EQZGCJBRGISGOTC2NZVWG6LILJBHEV3CINNEWSCLLFTUWZJPKJFECLS2NZ4G4U3QOZCFKTTPNZMVIWDCJBBHMUDBGFHXCQ3R'
        source = Source(sid, crypto_util.display_id())
        db_session.add(source)
        db_session.commit()

        files = ['1-abc1-msg.gpg', '2-abc2-msg.gpg']
        filenames = common.setup_test_docs(sid, files)

        archive = zipfile.ZipFile(store.get_bulk_archive(filenames))
        archivefile_contents = archive.namelist()

        for archived_file, actual_file in zip(archivefile_contents, filenames):
            actual_file_content = open(actual_file).read()
            zipped_file_content = archive.read(archived_file)
            self.assertEquals(zipped_file_content, actual_file_content)


if __name__ == "__main__":
    unittest.main(verbosity=2)
mark-in/securedrop-app-code
tests/test_unit_store.py
Python
agpl-3.0
1,563
"""Model managers for Reversion.""" try: set except NameError: from sets import Set as set # Python 2.3 fallback. from django.contrib.contenttypes.models import ContentType from django.db import models class VersionManager(models.Manager): """Manager for Version models.""" def get_for_object(self, object): """Returns all the versions of the given Revision, ordered by date created.""" content_type = ContentType.objects.get_for_model(object) return self.filter(content_type=content_type, object_id=unicode(object.pk)).order_by("pk").select_related().order_by("pk") def get_unique_for_object(self,obj): """Returns unique versions associated with the object.""" versions = self.get_for_object(obj) changed_versions = [] known_serialized_data = set() for version in versions: serialized_data = version.serialized_data if serialized_data in known_serialized_data: continue known_serialized_data.add(serialized_data) changed_versions.append(version) return changed_versions def get_for_date(self, object, date): """Returns the latest version of an object for the given date.""" try: return self.get_for_object(object).filter(revision__date_created__lte=date).order_by("-pk")[0] except IndexError: raise self.model.DoesNotExist def get_deleted(self, model_class): """Returns all the deleted versions for the given model class.""" live_ids = [unicode(row[0]) for row in model_class._default_manager.all().values_list("pk")] content_type = ContentType.objects.get_for_model(model_class) deleted_ids = self.filter(content_type=content_type).exclude(object_id__in=live_ids).order_by().values_list("object_id").distinct() deleted = [] for object_id, in deleted_ids: deleted.append(self.get_deleted_object(model_class, object_id)) return deleted def get_deleted_object(self, model_class, object_id): """ Returns the version corresponding to the deletion of the object with the given id. """ try: content_type = ContentType.objects.get_for_model(model_class) return self.filter(content_type=content_type, object_id=unicode(object_id)).order_by("-pk").select_related()[0] except IndexError: raise self.model.DoesNotExist
jaredjennings/snowy
wsgi/snowy/snowy/lib/reversion/managers.py
Python
agpl-3.0
2,511
#!/usr/bin/env python
"""
Copyright (c) 2004-Present Pivotal Software, Inc.

This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from gppylib.commands.base import Command
from tinctest import logger
from mpp.lib.PSQL import PSQL
from mpp.models import MPPTestCase

import os
import re
import socket
import time
import shutil
import sys
import signal


class aoreadmemory(MPPTestCase):

    def tearDown(self):
        gpfaultinjector = Command('fault injector',
                                  'source $GPHOME/greenplum_path.sh; '
                                  'gpfaultinjector -f malloc_failure '
                                  '-y reset -H ALL -r primary')
        gpfaultinjector.run()

    def test_ao_malloc_failure(self):
        """
        @product_version gpdb: [4.3.5.1 -]
        """
        PSQL.run_sql_command('DROP table if exists ao_read_malloc')
        PSQL.run_sql_command('create table ao_read_malloc (a int) with (appendonly=true, compresstype=quicklz)')
        PSQL.run_sql_command('insert into ao_read_malloc '
                             'select * from generate_series(1, 1000)')

        gpfaultinjector = Command('fault injector',
                                  'source $GPHOME/greenplum_path.sh; '
                                  'gpfaultinjector -f malloc_failure '
                                  '-y error -H ALL -r primary')
        gpfaultinjector.run()

        res = {'rc': 0, 'stdout': '', 'stderr': ''}
        PSQL.run_sql_command(sql_cmd='select count(*) from ao_read_malloc', results=res)
        logger.info(res)

        self.assertTrue("ERROR: fault triggered" in res['stderr'])
        self.assertFalse("ERROR: could not temporarily connect to one or more segments" in res['stderr'])

        logger.info('Pass')
cjcjameson/gpdb
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/access_methods/ao_memory.py
Python
apache-2.0
2,282
import os

import pytest

from pyleus.cli.storm_cluster import _get_storm_cmd_env
from pyleus.cli.storm_cluster import STORM_JAR_JVM_OPTS
from pyleus.cli.storm_cluster import StormCluster
from pyleus.cli.storm_cluster import TOPOLOGY_BUILDER_CLASS
from pyleus.testing import mock


class TestGetStormCmdEnd(object):

    @pytest.fixture(autouse=True)
    def mock_os_environ(self, monkeypatch):
        monkeypatch.setattr(os, 'environ', {})

    def test_jvm_opts_unset(self):
        assert _get_storm_cmd_env(None) is None

    def test_jvm_opts_set(self):
        jvm_opts = "-Dfoo=bar"
        env = _get_storm_cmd_env(jvm_opts)
        assert env[STORM_JAR_JVM_OPTS] == jvm_opts


class TestStormCluster(object):

    @pytest.fixture
    def cluster(self):
        return StormCluster(
            mock.sentinel.storm_cmd_path,
            mock.sentinel.nimbus_host,
            mock.sentinel.nimbus_port,
            mock.sentinel.verbose,
            mock.sentinel.jvm_opts,
        )

    def test__build_storm_cmd_no_port(self, cluster):
        cluster.nimbus_host = "test-host"
        cluster.nimbus_port = None

        storm_cmd = cluster._build_storm_cmd(["a", "cmd"])

        assert storm_cmd == [mock.sentinel.storm_cmd_path, "a", "cmd",
                             "-c", "nimbus.host=test-host"]

    def test__build_storm_cmd_with_port(self, cluster):
        cluster.nimbus_host = "test-host"
        cluster.nimbus_port = 4321

        storm_cmd = cluster._build_storm_cmd(["another", "cmd"])

        assert storm_cmd == [mock.sentinel.storm_cmd_path, "another", "cmd",
                             "-c", "nimbus.host=test-host",
                             "-c", "nimbus.thrift.port=4321"]

    def test_submit(self, cluster):
        with mock.patch.object(cluster, '_exec_storm_cmd', autospec=True) as mock_exec:
            cluster.submit(mock.sentinel.jar_path)

        mock_exec.assert_called_once_with(
            ["jar", mock.sentinel.jar_path, TOPOLOGY_BUILDER_CLASS])
jirafe/pyleus
tests/cli/storm_cluster_test.py
Python
apache-2.0
1,989
# Jython UDFs for Apache Pig; the @outputSchema decorator is supplied by Pig's scripting runtime.
from math import exp
from collections import defaultdict


@outputSchema("scaled: double")
def logistic_scale(val, logistic_param):
    return -1.0 + 2.0 / (1.0 + exp(-logistic_param * val))


@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double)")
def best_path(paths):
    return sorted(paths, key=lambda t: t[2])[0]


@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double, link_data: map[], linking_item: chararray)")
def best_path_detailed(paths):
    return sorted(paths, key=lambda t: t[2])[0]


@outputSchema("signal_map:map[]")
def aggregate_signal_types(signal_list):
    signal_dict = {}
    for row in signal_list:
        if row[3]:
            if not signal_dict.get(row[3]):
                signal_dict[row[3]] = 0
            signal_dict[row[3]] += 1
    return signal_dict


@outputSchema("signal_map:map[]")
def combine_signals(signal_list):
    signal_dict = {}
    for row in signal_list:
        if row[3]:
            for val in row[3].keys():
                # start a running total for this signal name the first time it is seen
                if not signal_dict.get(val):
                    signal_dict[val] = 0
                signal_dict[val] += row[3][val]
    return signal_dict
ShadySQL/mortar-recsys
udfs/jython/recsys.py
Python
apache-2.0
1,151
# definitions which are not being deprecated from wagtail.admin.forms
from .models import (  # NOQA
    DIRECT_FORM_FIELD_OVERRIDES, FORM_FIELD_OVERRIDES, WagtailAdminModelForm,
    WagtailAdminModelFormMetaclass, formfield_for_dbfield)
from .pages import WagtailAdminPageForm  # NOQA
zerolab/wagtail
wagtail/admin/forms/__init__.py
Python
bsd-3-clause
285
# -*- coding: utf-8 -*- # Generated by Django 1.11.6 on 2017-10-18 12:59 from django.db import migrations, models import django.db.models.deletion import taggit.managers import wagtail.search.index class Migration(migrations.Migration): initial = True dependencies = [ ('taggit', '0002_auto_20150616_2121'), ] operations = [ migrations.CreateModel( name='Author', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255)), ('date_of_birth', models.DateField(null=True)), ], bases=(models.Model, wagtail.search.index.Indexed), ), migrations.CreateModel( name='Book', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255)), ('publication_date', models.DateField()), ('number_of_pages', models.IntegerField()), ], bases=(models.Model, wagtail.search.index.Indexed), ), migrations.CreateModel( name='Character', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Novel', fields=[ ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='searchtests.Book')), ('setting', models.CharField(max_length=255)), ('protagonist', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='searchtests.Character')), ], bases=('searchtests.book',), ), migrations.CreateModel( name='ProgrammingGuide', fields=[ ('book_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='searchtests.Book')), ('programming_language', models.CharField(choices=[('py', 'Python'), ('js', 'JavaScript'), ('rs', 'Rust')], max_length=255)), ], bases=('searchtests.book',), ), migrations.AddField( model_name='book', name='authors', field=models.ManyToManyField(related_name='books', to='searchtests.Author'), ), migrations.AddField( model_name='book', name='tags', field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), ), migrations.AddField( model_name='character', name='novel', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='characters', to='searchtests.Novel'), ), ]
kaedroho/wagtail
wagtail/tests/search/migrations/0001_initial.py
Python
bsd-3-clause
3,247
#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.generators.random_graphs import * class TestGeneratorsRandom(): def smoke_test_random_graph(self): seed = 42 G=gnp_random_graph(100,0.25,seed) G=binomial_graph(100,0.25,seed) G=erdos_renyi_graph(100,0.25,seed) G=fast_gnp_random_graph(100,0.25,seed) G=gnm_random_graph(100,20,seed) G=dense_gnm_random_graph(100,20,seed) G=watts_strogatz_graph(10,2,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=connected_watts_strogatz_graph(10,2,0.1,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 20) G=newman_watts_strogatz_graph(10,2,0.0,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=newman_watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_true(G.number_of_edges() >= 20) G=barabasi_albert_graph(100,1,seed) G=barabasi_albert_graph(100,3,seed) assert_equal(G.number_of_edges(),(97*3)) G=powerlaw_cluster_graph(100,1,1.0,seed) G=powerlaw_cluster_graph(100,3,0.0,seed) assert_equal(G.number_of_edges(),(97*3)) G=duplication_divergence_graph(100,1.0,seed) assert_equal(len(G), 100) assert_raises(networkx.exception.NetworkXError, duplication_divergence_graph, 100, 2) assert_raises(networkx.exception.NetworkXError, duplication_divergence_graph, 100, -1) G=random_regular_graph(10,20,seed) assert_raises(networkx.exception.NetworkXError, random_regular_graph, 3, 21) constructor=[(10,20,0.8),(20,40,0.8)] G=random_shell_graph(constructor,seed) G=nx.random_lobster(10,0.1,0.5,seed) def test_random_zero_regular_graph(self): """Tests that a 0-regular graph has the correct number of nodes and edges. """ G = random_regular_graph(0, 10) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) def test_gnp(self): for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph, fast_gnp_random_graph]: G = generator(10, -1.1) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) G = generator(10, 0.1) assert_equal(len(G), 10) G = generator(10, 0.1, seed=42) assert_equal(len(G), 10) G = generator(10, 1.1) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 45) G = generator(10, -1.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) G = generator(10, 0.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) G = generator(10, 1.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 90) # assert that random graphs generate all edges for p close to 1 edges = 0 runs = 100 for i in range(runs): edges += sum(1 for _ in generator(10, 0.99999, directed=True).edges()) assert_almost_equal(edges/float(runs), 90, delta=runs*2.0/100) def test_gnm(self): G=gnm_random_graph(10,3) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 3) G=gnm_random_graph(10,3,seed=42) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 3) G=gnm_random_graph(10,100) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 45) G=gnm_random_graph(10,100,directed=True) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()),90) G=gnm_random_graph(10,-1.1) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()),0) def test_watts_strogatz_big_k(self): assert_raises(networkx.exception.NetworkXError, watts_strogatz_graph, 10, 10, 0.25) assert_raises(networkx.exception.NetworkXError, 
newman_watts_strogatz_graph, 10, 10, 0.25) # could create an infinite loop, now doesn't # infinite loop used to occur when a node has degree n-1 and needs to rewire watts_strogatz_graph(10, 9, 0.25, seed=0) newman_watts_strogatz_graph(10, 9, 0.5, seed=0)
beni55/networkx
networkx/generators/tests/test_random_graphs.py
Python
bsd-3-clause
4,826
from __future__ import unicode_literals
from .. import Provider as PhoneNumberProvider


class Provider(PhoneNumberProvider):
    formats = (
        # Mobile
        # Government website: http://www.uke.gov.pl/numeracja-843
        '50# ### ###',
        '51# ### ###',
        '53# ### ###',
        '57# ### ###',
        '60# ### ###',
        '66# ### ###',
        '69# ### ###',
        '72# ### ###',
        '73# ### ###',
        '78# ### ###',
        '79# ### ###',
        '88# ### ###',
        '+48 50# ### ###',
        '+48 51# ### ###',
        '+48 53# ### ###',
        '+48 57# ### ###',
        '+48 60# ### ###',
        '+48 66# ### ###',
        '+48 69# ### ###',
        '+48 72# ### ###',
        '+48 73# ### ###',
        '+48 78# ### ###',
        '+48 79# ### ###',
        '+48 88# ### ###',
        '32 ### ## ##',
        '+48 32 ### ## ##',
        '22 ### ## ##',
        '+48 22 ### ## ##',
    )
deanishe/alfred-fakeum
src/libs/faker/providers/phone_number/pl_PL/__init__.py
Python
mit
937
# -*- coding: utf-8 -*- """ *************************************************************************** lasoverlapPro.py --------------------- Date : October 2014 Copyright : (C) 2014 by Martin Isenburg Email : martin near rapidlasso point com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Martin Isenburg' __date__ = 'October 2014' __copyright__ = '(C) 2014, Martin Isenburg' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os from LAStoolsUtils import LAStoolsUtils from LAStoolsAlgorithm import LAStoolsAlgorithm from processing.core.parameters import ParameterBoolean from processing.core.parameters import ParameterNumber from processing.core.parameters import ParameterSelection class lasoverlapPro(LAStoolsAlgorithm): CHECK_STEP = "CHECK_STEP" ATTRIBUTE = "ATTRIBUTE" OPERATION = "OPERATION" ATTRIBUTES = ["elevation", "intensity", "number_of_returns", "scan_angle_abs", "density"] OPERATIONS = ["lowest", "highest", "average"] CREATE_OVERLAP_RASTER = "CREATE_OVERLAP_RASTER" CREATE_DIFFERENCE_RASTER = "CREATE_DIFFERENCE_RASTER" def defineCharacteristics(self): self.name = "lasoverlapPro" self.group = "LAStools Production" self.addParametersPointInputFolderGUI() self.addParametersFilesAreFlightlinesGUI() self.addParametersFilter1ReturnClassFlagsGUI() self.addParameter(ParameterNumber(lasoverlapPro.CHECK_STEP, self.tr("size of grid used for overlap check"), 0, None, 2.0)) self.addParameter(ParameterSelection(lasoverlapPro.ATTRIBUTE, self.tr("attribute to check"), lasoverlapPro.ATTRIBUTES, 0)) self.addParameter(ParameterSelection(lasoverlapPro.OPERATION, self.tr("operation on attribute per cell"), lasoverlapPro.OPERATIONS, 0)) self.addParameter(ParameterBoolean(lasoverlapPro.CREATE_OVERLAP_RASTER, self.tr("create overlap raster"), True)) self.addParameter(ParameterBoolean(lasoverlapPro.CREATE_DIFFERENCE_RASTER, self.tr("create difference raster"), True)) self.addParametersOutputDirectoryGUI() self.addParametersOutputAppendixGUI() self.addParametersRasterOutputFormatGUI() self.addParametersRasterOutputGUI() self.addParametersAdditionalGUI() self.addParametersCoresGUI() self.addParametersVerboseGUI() def processAlgorithm(self, progress): commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasoverlap")] self.addParametersVerboseCommands(commands) self.addParametersPointInputFolderCommands(commands) self.addParametersFilesAreFlightlinesCommands(commands) self.addParametersFilter1ReturnClassFlagsCommands(commands) step = self.getParameterValue(lasoverlapPro.CHECK_STEP) if step != 0.0: commands.append("-step") commands.append(str(step)) commands.append("-values") attribute = self.getParameterValue(lasoverlapPro.ATTRIBUTE) if attribute != 0: commands.append("-" + lasoverlapPro.ATTRIBUTES[attribute]) operation = self.getParameterValue(lasoverlapPro.OPERATION) if operation != 0: commands.append("-" + lasoverlapPro.OPERATIONS[operation]) if not self.getParameterValue(lasoverlapPro.CREATE_OVERLAP_RASTER): commands.append("-no_over") if not self.getParameterValue(lasoverlapPro.CREATE_DIFFERENCE_RASTER): commands.append("-no_diff") 
self.addParametersOutputDirectoryCommands(commands) self.addParametersOutputAppendixCommands(commands) self.addParametersRasterOutputFormatCommands(commands) self.addParametersRasterOutputCommands(commands) self.addParametersAdditionalCommands(commands) self.addParametersCoresCommands(commands) LAStoolsUtils.runLAStools(commands, progress)
pavlova-marina/QGIS
python/plugins/processing/algs/lidar/lastools/lasoverlapPro.py
Python
gpl-2.0
4,596
##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Declaration of toolchains.linalg namespace.

@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from pkgutil import extend_path

# we're not the only ones in this namespace
__path__ = extend_path(__path__, __name__)  #@ReservedAssignment
pneerincx/easybuild-framework
easybuild/toolchains/linalg/__init__.py
Python
gpl-2.0
1,340
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2014, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import hashlib import itertools import numpy from nupic.bindings.math import Random from nupic.encoders.base import Encoder class CoordinateEncoder(Encoder): """ Given a coordinate in an N-dimensional space, and a radius around that coordinate, the Coordinate Encoder returns an SDR representation of that position. The Coordinate Encoder uses an N-dimensional integer coordinate space. For example, a valid coordinate in this space is (150, -49, 58), whereas an invalid coordinate would be (55.4, -5, 85.8475). It uses the following algorithm: 1. Find all the coordinates around the input coordinate, within the specified radius. 2. For each coordinate, use a uniform hash function to deterministically map it to a real number between 0 and 1. This is the "order" of the coordinate. 3. Of these coordinates, pick the top W by order, where W is the number of active bits desired in the SDR. 4. For each of these W coordinates, use a uniform hash function to deterministically map it to one of the bits in the SDR. Make this bit active. 5. This results in a final SDR with exactly W bits active (barring chance hash collisions). """ def __init__(self, w=21, n=1000, name=None, verbosity=0): """ See `nupic.encoders.base.Encoder` for more information. @param name An optional string which will become part of the description """ # Validate inputs if (w <= 0) or (w % 2 == 0): raise ValueError("w must be an odd positive integer") if (n <= 6 * w) or (not isinstance(n, int)): raise ValueError("n must be an int strictly greater than 6*w. For " "good results we recommend n be strictly greater " "than 11*w") self.w = w self.n = n self.verbosity = verbosity self.encoders = None if name is None: name = "[%s:%s]" % (self.n, self.w) self.name = name def getWidth(self): """See `nupic.encoders.base.Encoder` for more information.""" return self.n def getDescription(self): """See `nupic.encoders.base.Encoder` for more information.""" return [('coordinate', 0), ('radius', 1)] def getScalars(self, inputData): """See `nupic.encoders.base.Encoder` for more information.""" return numpy.array([0]*len(inputData)) def encodeIntoArray(self, inputData, output): """ See `nupic.encoders.base.Encoder` for more information. 
@param inputData (tuple) Contains coordinate (numpy.array) and radius (float) @param output (numpy.array) Stores encoded SDR in this numpy array """ (coordinate, radius) = inputData neighbors = self._neighbors(coordinate, radius) winners = self._topWCoordinates(neighbors, self.w) bitFn = lambda coordinate: self._bitForCoordinate(coordinate, self.n) indices = numpy.array([bitFn(w) for w in winners]) output[:] = 0 output[indices] = 1 @staticmethod def _neighbors(coordinate, radius): """ Returns coordinates around given coordinate, within given radius. Includes given coordinate. @param coordinate (numpy.array) Coordinate whose neighbors to find @param radius (float) Radius around `coordinate` @return (numpy.array) List of coordinates """ ranges = [range(n-radius, n+radius+1) for n in coordinate.tolist()] return numpy.array(list(itertools.product(*ranges))) @classmethod def _topWCoordinates(cls, coordinates, w): """ Returns the top W coordinates by order. @param coordinates (numpy.array) A 2D numpy array, where each element is a coordinate @param w (int) Number of top coordinates to return @return (numpy.array) A subset of `coordinates`, containing only the top ones by order """ orders = numpy.array([cls._orderForCoordinate(c) for c in coordinates.tolist()]) indices = numpy.argsort(orders)[-w:] return coordinates[indices] @staticmethod def _hashCoordinate(coordinate): """Hash a coordinate to a 64 bit integer.""" coordinateStr = ",".join(str(v) for v in coordinate) # Compute the hash and convert to 64 bit int. hash = int(int(hashlib.md5(coordinateStr).hexdigest(), 16) % (2 ** 64)) return hash @classmethod def _orderForCoordinate(cls, coordinate): """ Returns the order for a coordinate. @param coordinate (numpy.array) Coordinate @return (float) A value in the interval [0, 1), representing the order of the coordinate """ seed = cls._hashCoordinate(coordinate) rng = Random(seed) return rng.getReal64() @classmethod def _bitForCoordinate(cls, coordinate, n): """ Maps the coordinate to a bit in the SDR. @param coordinate (numpy.array) Coordinate @param n (int) The number of available bits in the SDR @return (int) The index to a bit in the SDR """ seed = cls._hashCoordinate(coordinate) rng = Random(seed) return rng.getUInt32(n) def dump(self): print "CoordinateEncoder:" print " w: %d" % self.w print " n: %d" % self.n @classmethod def read(cls, proto): encoder = object.__new__(cls) encoder.w = proto.w encoder.n = proto.n encoder.verbosity = proto.verbosity encoder.name = proto.name return encoder def write(self, proto): proto.w = self.w proto.n = self.n proto.verbosity = self.verbosity proto.name = self.name
david-ragazzi/nupic
nupic/encoders/coordinate.py
Python
gpl-3.0
6,560
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2014, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ Metric class used in monitor mixin framework. """ import numpy class Metric(object): """ A metric computed over a set of data (usually from a `CountsTrace`). """ def __init__(self, monitor, title, data): """ @param monitor (MonitorMixinBase) Monitor Mixin instance that generated this trace @param title (string) Title @param data (list) List of numbers to compute metric from """ self.monitor = monitor self.title = title self.min = None self.max = None self.sum = None self.mean = None self.standardDeviation = None self._computeStats(data) @staticmethod def createFromTrace(trace, excludeResets=None): data = list(trace.data) if excludeResets is not None: data = [x for i, x in enumerate(trace.data) if not excludeResets.data[i]] return Metric(trace.monitor, trace.title, data) def copy(self): metric = Metric(self.monitor, self.title, []) metric.min = self.min metric.max = self.max metric.sum = self.sum metric.mean = self.mean metric.standardDeviation = self.standardDeviation return metric def prettyPrintTitle(self): return ("[{0}] {1}".format(self.monitor.mmName, self.title) if self.monitor.mmName is not None else self.title) def _computeStats(self, data): if not len(data): return self.min = min(data) self.max = max(data) self.sum = sum(data) self.mean = numpy.mean(data) self.standardDeviation = numpy.std(data) def getStats(self, sigFigs=7): if self.mean is None: return [None, None, None, None, None] return [round(self.mean, sigFigs), round(self.standardDeviation, sigFigs), round(self.min, sigFigs), round(self.max, sigFigs), round(self.sum, sigFigs)]
david-ragazzi/nupic
nupic/research/monitor_mixin/metric.py
Python
gpl-3.0
2,865
#* This file is part of the MOOSE framework #* https://www.mooseframework.org #* #* All rights reserved, see COPYRIGHT for full restrictions #* https://github.com/idaholab/moose/blob/master/COPYRIGHT #* #* Licensed under LGPL 2.1, please see LICENSE for details #* https://www.gnu.org/licenses/lgpl-2.1.html import matplotlib.pyplot as plt import glob import collections import pandas import numpy as np class ConvergencePlot(object): """ A tool for making convergence plots. Args: x[np.array]: The x data of the graph (e.g., dofs) y[np.array]: The y data of the graph (e.g., L2_error) Key, value Options: xlabel[str]: The label for the x-axis ylabel[str]: The label for the y-axis """ Line = collections.namedtuple('Line', 'x y label') def __init__(self, xlabel='x', ylabel='y', fontsize=12, fit=True): self._figure = plt.figure(figsize=(10,6), facecolor='w') self._axes = plt.gca() self._axes.set_yscale('log') self._axes.set_xscale('log') # Add axis labels plt.xlabel(xlabel, fontsize=fontsize) plt.ylabel(ylabel, fontsize=fontsize) # Adjust tick mark fonts for tick in self._axes.xaxis.get_major_ticks() + self._axes.yaxis.get_major_ticks(): tick.label.set_fontsize(fontsize) # Apply grid marks plt.grid(True, which='both', color=[0.8]*3) self.label_to_slope = {} self.label_to_intercept = {} def plot(self, df, label=None, title=None, num_fitted_points=None, slope_precision=3, **kwargs): num_y_columns = len(df.columns) - 1 if label: if num_y_columns > 1: if not isinstance(label, list): raise TypeError("For multiple y-data label must be a list") if isinstance(label, list) and num_y_columns != len(label): raise IOError("The length of the label and the number of y columns must be the same") if not isinstance(label, list): label = [label] x = df[df.columns[0]] lines = [] for i in range(1,len(df.columns)): y = df[df.columns[i]] if label is None: this_label = 'line-{}'.format(len(lines)) else: this_label = label[i-1] if num_fitted_points is not None: coeffs = self._fit(x[-num_fitted_points:], y[-num_fitted_points:]) else: coeffs = self._fit(x, y) slope = coeffs[0] intercept = coeffs[1] self.label_to_slope.update({this_label:slope}) self.label_to_intercept.update({this_label:intercept}) this_label = '{}: {:.{precision}f}'.format(this_label, slope, precision=slope_precision) lines.append(self._axes.plot(x, y, label=this_label, **kwargs)[0]) if title: self._axes.set_title(title) self._axes.legend() return lines def set_title(self, title): self._axes.set_title(title) def _fit(self, x, y): """ Apply the fit and report the slope. Key, value Options: x[float]: The x-position in data coordinates. y[float]: The y-position in data coordinates. """ # Perform fit coefficients = np.polyfit(np.log10(x), np.log10(y), 1) return coefficients def save(self, filename): """ Save figure to a file. Args: filename[str]: The destination file. """ plt.savefig(filename) def show(self): """ Display the plot. """ plt.show()
harterj/moose
python/mms/ConvergencePlot.py
Python
lgpl-2.1
3,697
"""Test the Advantage Air Sensor Platform.""" from datetime import timedelta from json import loads from homeassistant.components.advantage_air.const import DOMAIN as ADVANTAGE_AIR_DOMAIN from homeassistant.components.advantage_air.sensor import ( ADVANTAGE_AIR_SERVICE_SET_TIME_TO, ADVANTAGE_AIR_SET_COUNTDOWN_VALUE, ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ATTR_ENTITY_ID from homeassistant.helpers import entity_registry as er from homeassistant.util import dt from tests.common import async_fire_time_changed from tests.components.advantage_air import ( TEST_SET_RESPONSE, TEST_SET_URL, TEST_SYSTEM_DATA, TEST_SYSTEM_URL, add_mock_config, ) async def test_sensor_platform(hass, aioclient_mock): """Test sensor platform.""" aioclient_mock.get( TEST_SYSTEM_URL, text=TEST_SYSTEM_DATA, ) aioclient_mock.get( TEST_SET_URL, text=TEST_SET_RESPONSE, ) await add_mock_config(hass) registry = er.async_get(hass) assert len(aioclient_mock.mock_calls) == 1 # Test First TimeToOn Sensor entity_id = "sensor.ac_one_time_to_on" state = hass.states.get(entity_id) assert state assert int(state.state) == 0 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-timetoOn" value = 20 await hass.services.async_call( ADVANTAGE_AIR_DOMAIN, ADVANTAGE_AIR_SERVICE_SET_TIME_TO, {ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value}, blocking=True, ) assert len(aioclient_mock.mock_calls) == 3 assert aioclient_mock.mock_calls[-2][0] == "GET" assert aioclient_mock.mock_calls[-2][1].path == "/setAircon" data = loads(aioclient_mock.mock_calls[-2][1].query["json"]) assert data["ac1"]["info"]["countDownToOn"] == value assert aioclient_mock.mock_calls[-1][0] == "GET" assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData" # Test First TimeToOff Sensor entity_id = "sensor.ac_one_time_to_off" state = hass.states.get(entity_id) assert state assert int(state.state) == 10 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-timetoOff" value = 0 await hass.services.async_call( ADVANTAGE_AIR_DOMAIN, ADVANTAGE_AIR_SERVICE_SET_TIME_TO, {ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value}, blocking=True, ) assert len(aioclient_mock.mock_calls) == 5 assert aioclient_mock.mock_calls[-2][0] == "GET" assert aioclient_mock.mock_calls[-2][1].path == "/setAircon" data = loads(aioclient_mock.mock_calls[-2][1].query["json"]) assert data["ac1"]["info"]["countDownToOff"] == value assert aioclient_mock.mock_calls[-1][0] == "GET" assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData" # Test First Zone Vent Sensor entity_id = "sensor.zone_open_with_sensor_vent" state = hass.states.get(entity_id) assert state assert int(state.state) == 100 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-z01-vent" # Test Second Zone Vent Sensor entity_id = "sensor.zone_closed_with_sensor_vent" state = hass.states.get(entity_id) assert state assert int(state.state) == 0 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-z02-vent" # Test First Zone Signal Sensor entity_id = "sensor.zone_open_with_sensor_signal" state = hass.states.get(entity_id) assert state assert int(state.state) == 40 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-z01-signal" # Test Second Zone Signal Sensor entity_id = "sensor.zone_closed_with_sensor_signal" state = hass.states.get(entity_id) assert 
state assert int(state.state) == 10 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-z02-signal" # Test First Zone Temp Sensor (disabled by default) entity_id = "sensor.zone_open_with_sensor_temperature" assert not hass.states.get(entity_id) registry.async_update_entity(entity_id=entity_id, disabled_by=None) await hass.async_block_till_done() async_fire_time_changed( hass, dt.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), ) await hass.async_block_till_done() state = hass.states.get(entity_id) assert state assert int(state.state) == 25 entry = registry.async_get(entity_id) assert entry assert entry.unique_id == "uniqueid-ac1-z01-temp"
aronsky/home-assistant
tests/components/advantage_air/test_sensor.py
Python
apache-2.0
4,781
""" This provides some useful code used by other modules. This is not to be used by the end user which is why it is hidden. """ import string, sys class LinkError(Exception): pass def refine_import_err(mod_name, extension_name, exc): """ Checks to see if the ImportError was because the library itself was not there or because there was a link error. If there was a link error it raises a LinkError if not it does nothing. Keyword arguments ----------------- - mod_name : The name of the Python module that was imported. - extension_name : The name of the extension module that is to be imported by the module having mod_name. - exc : The exception raised when the module called mod_name was imported. To see example usage look at __init__.py. """ try: del sys.modules['vtk.%s'%mod_name] except KeyError: pass if string.find(str(exc), extension_name) == -1: raise LinkError, str(exc)
b3c/VTK-5.8
Wrapping/Python/vtk/__helper.py
Python
bsd-3-clause
981
""" A sub-package for efficiently dealing with polynomials. Within the documentation for this sub-package, a "finite power series," i.e., a polynomial (also referred to simply as a "series") is represented by a 1-D numpy array of the polynomial's coefficients, ordered from lowest order term to highest. For example, array([1,2,3]) represents ``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial applicable to the specific module in question, e.g., `polynomial` (which "wraps" the "standard" basis) or `chebyshev`. For optimal performance, all operations on polynomials, including evaluation at an argument, are implemented as operations on the coefficients. Additional (module-specific) information can be found in the docstring for the module of interest. This package provides *convenience classes* for each of six different kinds of polynomials: ======================== ================ **Name** **Provides** ======================== ================ `~polynomial.Polynomial` Power series `~chebyshev.Chebyshev` Chebyshev series `~legendre.Legendre` Legendre series `~laguerre.Laguerre` Laguerre series `~hermite.Hermite` Hermite series `~hermite_e.HermiteE` HermiteE series ======================== ================ These *convenience classes* provide a consistent interface for creating, manipulating, and fitting data with polynomials of different bases. The convenience classes are the preferred interface for the `~numpy.polynomial` package, and are available from the ``numpy.polynomial`` namespace. This eliminates the need to navigate to the corresponding submodules, e.g. ``np.polynomial.Polynomial`` or ``np.polynomial.Chebyshev`` instead of ``np.polynomial.polynomial.Polynomial`` or ``np.polynomial.chebyshev.Chebyshev``, respectively. The classes provide a more consistent and concise interface than the type-specific functions defined in the submodules for each type of polynomial. For example, to fit a Chebyshev polynomial with degree ``1`` to data given by arrays ``xdata`` and ``ydata``, the `~chebyshev.Chebyshev.fit` class method:: >>> from numpy.polynomial import Chebyshev >>> c = Chebyshev.fit(xdata, ydata, deg=1) is preferred over the `chebyshev.chebfit` function from the ``np.polynomial.chebyshev`` module:: >>> from numpy.polynomial.chebyshev import chebfit >>> c = chebfit(xdata, ydata, deg=1) See :doc:`routines.polynomials.classes` for more details. Convenience Classes =================== The following lists the various constants and methods common to all of the classes representing the various kinds of polynomials. In the following, the term ``Poly`` represents any one of the convenience classes (e.g. `~polynomial.Polynomial`, `~chebyshev.Chebyshev`, `~hermite.Hermite`, etc.) while the lowercase ``p`` represents an **instance** of a polynomial class. Constants --------- - ``Poly.domain`` -- Default domain - ``Poly.window`` -- Default window - ``Poly.basis_name`` -- String used to represent the basis - ``Poly.maxpower`` -- Maximum value ``n`` such that ``p**n`` is allowed - ``Poly.nickname`` -- String used in printing Creation -------- Methods for creating polynomial instances. 
- ``Poly.basis(degree)`` -- Basis polynomial of given degree - ``Poly.identity()`` -- ``p`` where ``p(x) = x`` for all ``x`` - ``Poly.fit(x, y, deg)`` -- ``p`` of degree ``deg`` with coefficients determined by the least-squares fit to the data ``x``, ``y`` - ``Poly.fromroots(roots)`` -- ``p`` with specified roots - ``p.copy()`` -- Create a copy of ``p`` Conversion ---------- Methods for converting a polynomial instance of one kind to another. - ``p.cast(Poly)`` -- Convert ``p`` to instance of kind ``Poly`` - ``p.convert(Poly)`` -- Convert ``p`` to instance of kind ``Poly`` or map between ``domain`` and ``window`` Calculus -------- - ``p.deriv()`` -- Take the derivative of ``p`` - ``p.integ()`` -- Integrate ``p`` Validation ---------- - ``Poly.has_samecoef(p1, p2)`` -- Check if coefficients match - ``Poly.has_samedomain(p1, p2)`` -- Check if domains match - ``Poly.has_sametype(p1, p2)`` -- Check if types match - ``Poly.has_samewindow(p1, p2)`` -- Check if windows match Misc ---- - ``p.linspace()`` -- Return ``x, p(x)`` at equally-spaced points in ``domain`` - ``p.mapparms()`` -- Return the parameters for the linear mapping between ``domain`` and ``window``. - ``p.roots()`` -- Return the roots of `p`. - ``p.trim()`` -- Remove trailing coefficients. - ``p.cutdeg(degree)`` -- Truncate p to given degree - ``p.truncate(size)`` -- Truncate p to given size """ from .polynomial import Polynomial from .chebyshev import Chebyshev from .legendre import Legendre from .hermite import Hermite from .hermite_e import HermiteE from .laguerre import Laguerre __all__ = [ "set_default_printstyle", "polynomial", "Polynomial", "chebyshev", "Chebyshev", "legendre", "Legendre", "hermite", "Hermite", "hermite_e", "HermiteE", "laguerre", "Laguerre", ] def set_default_printstyle(style): """ Set the default format for the string representation of polynomials. Values for ``style`` must be valid inputs to ``__format__``, i.e. 'ascii' or 'unicode'. Parameters ---------- style : str Format string for default printing style. Must be either 'ascii' or 'unicode'. Notes ----- The default format depends on the platform: 'unicode' is used on Unix-based systems and 'ascii' on Windows. This determination is based on default font support for the unicode superscript and subscript ranges. Examples -------- >>> p = np.polynomial.Polynomial([1, 2, 3]) >>> c = np.polynomial.Chebyshev([1, 2, 3]) >>> np.polynomial.set_default_printstyle('unicode') >>> print(p) 1.0 + 2.0·x¹ + 3.0·x² >>> print(c) 1.0 + 2.0·T₁(x) + 3.0·T₂(x) >>> np.polynomial.set_default_printstyle('ascii') >>> print(p) 1.0 + 2.0 x**1 + 3.0 x**2 >>> print(c) 1.0 + 2.0 T_1(x) + 3.0 T_2(x) >>> # Formatting supersedes all class/package-level defaults >>> print(f"{p:unicode}") 1.0 + 2.0·x¹ + 3.0·x² """ if style not in ('unicode', 'ascii'): raise ValueError( f"Unsupported format string '{style}'. Valid options are 'ascii' " f"and 'unicode'" ) _use_unicode = True if style == 'ascii': _use_unicode = False from ._polybase import ABCPolyBase ABCPolyBase._use_unicode = _use_unicode from numpy._pytesttester import PytestTester test = PytestTester(__name__) del PytestTester
simongibbons/numpy
numpy/polynomial/__init__.py
Python
bsd-3-clause
6,788
import os
import re
import sys


def ReadFileAsLines(filename):
    """Reads a file, removing blank lines and lines that start with #"""
    file = open(filename, "r")
    raw_lines = file.readlines()
    file.close()
    lines = []
    for line in raw_lines:
        line = line.strip()
        if len(line) > 0 and not line.startswith("#"):
            lines.append(line)
    return lines


def GetSuiteName(testName):
    return testName[:testName.find("/")]


def GetTestName(testName):
    replacements = {".test": "", ".": "_"}
    splitTestName = testName.split("/")
    cleanName = splitTestName[-2] + "_" + splitTestName[-1]
    for replaceKey in replacements:
        cleanName = cleanName.replace(replaceKey, replacements[replaceKey])
    return cleanName


def GenerateTests(outFile, testNames):
    # Remove duplicate tests
    testNames = list(set(testNames))
    testSuites = []

    outFile.write("#include \"gles_conformance_tests.h\"\n\n")
    for test in testNames:
        testSuite = GetSuiteName(test)
        if not testSuite in testSuites:
            outFile.write("DEFINE_CONFORMANCE_TEST_CLASS(" + testSuite + ");\n\n")
            testSuites.append(testSuite)
        outFile.write("TYPED_TEST(" + testSuite + ", " + GetTestName(test) + ")\n")
        outFile.write("{\n")
        outFile.write(" run(\"" + test + "\");\n")
        outFile.write("}\n\n")


def GenerateTestList(sourceFile, rootDir):
    tests = []
    fileName, fileExtension = os.path.splitext(sourceFile)
    if fileExtension == ".run":
        lines = ReadFileAsLines(sourceFile)
        for line in lines:
            tests += GenerateTestList(os.path.join(os.path.dirname(sourceFile), line), rootDir)
    elif fileExtension == ".test":
        tests.append(os.path.relpath(os.path.realpath(sourceFile), rootDir).replace("\\", "/"))
    return tests


def main(argv):
    tests = GenerateTestList(argv[0], argv[1])
    tests.sort()

    output = open(argv[2], 'wb')
    GenerateTests(output, tests)
    output.close()

    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
endlessm/chromium-browser
third_party/angle/src/tests/gles_conformance_tests/generate_gles_conformance_tests.py
Python
bsd-3-clause
2,098
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

"""
This package contains various command line wrappers to programs used in
pymatgen that do not have Python equivalents.
"""
gVallverdu/pymatgen
pymatgen/command_line/__init__.py
Python
mit
237
"""Tests for distutils.command.build_scripts.""" import os import unittest from distutils.command.build_scripts import build_scripts from distutils.core import Distribution import sysconfig from distutils.tests import support from test.test_support import run_unittest class BuildScriptsTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): def test_default_settings(self): cmd = self.get_build_scripts_cmd("/foo/bar", []) self.assertTrue(not cmd.force) self.assertTrue(cmd.build_dir is None) cmd.finalize_options() self.assertTrue(cmd.force) self.assertEqual(cmd.build_dir, "/foo/bar") def test_build(self): source = self.mkdtemp() target = self.mkdtemp() expected = self.write_sample_scripts(source) cmd = self.get_build_scripts_cmd(target, [os.path.join(source, fn) for fn in expected]) cmd.finalize_options() cmd.run() built = os.listdir(target) for name in expected: self.assertTrue(name in built) def get_build_scripts_cmd(self, target, scripts): import sys dist = Distribution() dist.scripts = scripts dist.command_obj["build"] = support.DummyCommand( build_scripts=target, force=1, executable=sys.executable ) return build_scripts(dist) def write_sample_scripts(self, dir): expected = [] expected.append("script1.py") self.write_script(dir, "script1.py", ("#! /usr/bin/env python2.3\n" "# bogus script w/ Python sh-bang\n" "pass\n")) expected.append("script2.py") self.write_script(dir, "script2.py", ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n")) expected.append("shell.sh") self.write_script(dir, "shell.sh", ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n")) return expected def write_script(self, dir, name, text): f = open(os.path.join(dir, name), "w") try: f.write(text) finally: f.close() def test_version_int(self): source = self.mkdtemp() target = self.mkdtemp() expected = self.write_sample_scripts(source) cmd = self.get_build_scripts_cmd(target, [os.path.join(source, fn) for fn in expected]) cmd.finalize_options() # http://bugs.python.org/issue4524 # # On linux-g++-32 with command line `./configure --enable-ipv6 # --with-suffix=3`, python is compiled okay but the build scripts # failed when writing the name of the executable old = sysconfig.get_config_vars().get('VERSION') sysconfig._CONFIG_VARS['VERSION'] = 4 try: cmd.run() finally: if old is not None: sysconfig._CONFIG_VARS['VERSION'] = old built = os.listdir(target) for name in expected: self.assertTrue(name in built) def test_suite(): return unittest.makeSuite(BuildScriptsTestCase) if __name__ == "__main__": run_unittest(test_suite())
ktan2020/legacy-automation
win/Lib/distutils/tests/test_build_scripts.py
Python
mit
3,712
# -*- coding: utf-8 -*- # # synapsecollection.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. """ Example script to show some of the possibilities of the SynapseCollection class. We connect neurons, and get the SynapseCollection with a GetConnections call. To get a better understanding of the connections, we plot the weights between the source and targets. """ import nest import matplotlib.pyplot as plt import numpy as np def makeMatrix(sources, targets, weights): """ Returns a matrix with the weights between the source and target node_ids. """ aa = np.zeros((max(sources)+1, max(targets)+1)) for src, trg, wght in zip(sources, targets, weights): aa[src, trg] += wght return aa def plotMatrix(srcs, tgts, weights, title, pos): """ Plots weight matrix. """ plt.subplot(pos) plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False) plt.xlim([min(tgts)-0.5, max(tgts)+0.5]) plt.xlabel('target') plt.ylim([max(srcs)+0.5, min(srcs)-0.5]) plt.ylabel('source') plt.title(title) plt.colorbar(fraction=0.046, pad=0.04) """ Start with a simple, one_to_one example. We create the neurons, connect them, and get the connections. From this we can get the connected sources, targets, and weights. The corresponding matrix will be the identity matrix, as we have a one_to_one connection. """ nest.ResetKernel() nrns = nest.Create('iaf_psc_alpha', 10) nest.Connect(nrns, nrns, 'one_to_one') conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection # We can get desired information of the SynapseCollection with simple get() call. g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] # Plot the matrix consisting of the weights between the sources and targets plt.figure(figsize=(12, 10)) plotMatrix(srcs, tgts, weights, 'Uniform weight', 121) """ Add some weights to the connections, and plot the updated weight matrix. """ # We can set data of the connections with a simple set() call. w = [{'weight': x*1.0} for x in range(1, 11)] conns.set(w) weights = conns.weight plotMatrix(srcs, tgts, weights, 'Set weight', 122) """ We can also plot an all_to_all connection, with uniformly distributed weights, and different number of sources and targets. """ nest.ResetKernel() pre = nest.Create('iaf_psc_alpha', 10) post = nest.Create('iaf_psc_delta', 5) nest.Connect(pre, post, syn_spec={'weight': {'distribution': 'uniform', 'low': 0.5, 'high': 4.5}}) # Get a SynapseCollection with all connections conns = nest.GetConnections() srcs = conns.source tgts = conns.target weights = conns.weight plt.figure(figsize=(12, 10)) plotMatrix(srcs, tgts, weights, 'All to all connection', 111) """ Lastly, we'll do an exmple that is a bit more complex. We connect different neurons with different rules, synapse models and weight distributions, and get different SynapseCollections by calling GetConnections with different inputs. 
""" nest.ResetKernel() nrns = nest.Create('iaf_psc_alpha', 15) nest.Connect(nrns[:5], nrns[:5], 'one_to_one', {'synapse_model': 'stdp_synapse', 'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}}) nest.Connect(nrns[:10], nrns[5:12], {'rule': 'pairwise_bernoulli', 'p': 0.4}, {'weight': 4.0}) nest.Connect(nrns[5:10], nrns[:5], {'rule': 'fixed_total_number', 'N': 5}, {'weight': 3.0}) nest.Connect(nrns[10:], nrns[:12], 'all_to_all', {'synapse_model': 'stdp_synapse', 'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}}) nest.Connect(nrns, nrns[12:], {'rule': 'fixed_indegree', 'indegree': 3}) # First get a SynapseCollection consisting of all the connections conns = nest.GetConnections() srcs = conns.source tgts = conns.target weights = conns.weight plt.figure(figsize=(14, 12)) plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221) # Get SynapseCollection consisting of a subset of connections conns = nest.GetConnections(nrns[:10], nrns[:10]) g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222) # Get SynapseCollection consisting of just the stdp_synapses conns = nest.GetConnections(synapse_model='stdp_synapse') g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223) # Get SynapseCollection consisting of the fixed_total_number connections, but set # weight before plotting conns = nest.GetConnections(nrns[5:10], nrns[:5]) w = [{'weight': x*1.0} for x in range(1, 6)] conns.set(w) g = conns.get(['source', 'target', 'weight']) srcs = g['source'] tgts = g['target'] weights = g['weight'] plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224) plt.show()
SepehrMN/nest-simulator
pynest/examples/synapsecollection.py
Python
gpl-2.0
5,672
#!/usr/bin/env python
"""
The LibVMI Library is an introspection library that simplifies access to
memory in a target virtual machine or in a file containing a dump of a
system's physical memory.  LibVMI is based on the XenAccess Library.

Copyright 2011 Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government
retains certain rights in this software.

Author: Bryan D. Payne ([email protected])

This file is part of LibVMI.

LibVMI is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.

LibVMI is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with LibVMI.  If not, see <http://www.gnu.org/licenses/>.
"""
import pyvmi
import sys


def get_processes(vmi):
    tasks_offset = vmi.get_offset("win_tasks")
    name_offset = vmi.get_offset("win_pname") - tasks_offset
    pid_offset = vmi.get_offset("win_pid") - tasks_offset

    list_head = vmi.read_addr_ksym("PsInitialSystemProcess")
    next_process = vmi.read_addr_va(list_head + tasks_offset, 0)
    list_head = next_process

    while True:
        procname = vmi.read_str_va(next_process + name_offset, 0)
        pid = vmi.read_32_va(next_process + pid_offset, 0)
        next_process = vmi.read_addr_va(next_process, 0)

        if (pid < 1 << 16):
            yield pid, procname

        if (list_head == next_process):
            break


def main(argv):
    vmi = pyvmi.init(argv[1], "complete")

    for pid, procname in get_processes(vmi):
        print "[%5d] %s" % (pid, procname)


if __name__ == "__main__":
    main(sys.argv)
jie-lin/libvmi
tools/pyvmi/examples/process-list.py
Python
gpl-3.0
1,982
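# Editor's note: the LibVMI process-list example above walks the kernel's circular
# process list via pyvmi, which needs a live VM or a memory image. The self-contained
# sketch below mirrors only the traversal logic (read the list head, follow the
# "next" pointer until it wraps around), using an in-memory stub instead of a real
# introspection handle; every address and offset here is made up.
def walk_process_list(read_ptr, read_pid, read_name, list_head, tasks_offset):
    """Yield (pid, name) pairs until the circular list returns to its first entry."""
    next_process = read_ptr(list_head + tasks_offset)
    head = next_process
    while True:
        yield read_pid(next_process), read_name(next_process)
        next_process = read_ptr(next_process)
        if next_process == head:
            break


if __name__ == '__main__':
    # Fake "memory": three processes linked in a ring at addresses 0x100/0x200/0x300.
    links = {0x1000 + 0x188: 0x100, 0x100: 0x200, 0x200: 0x300, 0x300: 0x100}
    pids = {0x100: 4, 0x200: 512, 0x300: 1044}
    names = {0x100: 'System', 0x200: 'smss.exe', 0x300: 'winlogon.exe'}
    for pid, name in walk_process_list(links.get, pids.get, names.get, 0x1000, 0x188):
        print("[%5d] %s" % (pid, name))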
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2019, Anusha Hegde <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: vmware_guest_serial_port short_description: Manage serial ports on an existing VM version_added: "2.10" description: - "This module can be used to manage serial ports on an existing VM" options: name: description: - Name of the virtual machine. - This is a required parameter, if parameter C(uuid) or C(moid) is not supplied. type: str uuid: description: - UUID of the instance to manage the serial ports, this is VMware's unique identifier. - This is a required parameter, if parameter C(name) or C(moid) is not supplied. type: str moid: description: - Managed Object ID of the instance to manage if known, this is a unique identifier only within a single vCenter instance. - This is required if C(name) or C(uuid) is not supplied. type: str use_instance_uuid: description: - Whether to use the VMware instance UUID rather than the BIOS UUID. default: no type: bool backings: type: list description: - A list of backings for serial ports. - 'C(backing_type) (str): is required to add or reconfigure or remove an existing serial port.' - 'Valid attributes are:' - ' - C(backing_type) (str): Backing type is required for the serial ports to be added or reconfigured or removed.' - ' - C(state) (str): is required to identify whether we are adding, modifying or removing the serial port. - choices: - C(present): modify an existing serial port. C(backing_type) is required to determine the port. The first matching C(backing_type) and either of C(service_uri) or C(pipe_name) or C(device_name) or C(file_path) will be modified. If there is only one device with a backing type, the secondary details are not needed. We will match the last such device with the given backing type. - C(absent): remove an existing serial port. C(backing_type) is required to determine the port. The first matching C(backing_type) and either of C(service_uri) or C(pipe_name) or C(device_name) or C(file_path) will be removed. If there is only one device with a backing type, the secondary details are not needed. We will match the last such device with the given backing type.' - ' - C(yield_on_poll) (bool): Enables CPU yield behavior. Default value is true.' - ' - C(direction) (str): Required when I(backing_type=network). The direction of the connection. - choices: - client - server' - ' - C(service_uri) (str): Required when I(backing_type=network). Identifies the local host or a system on the network, depending on the value of I(direction). If you use the virtual machine as a server, the URI identifies the host on which the virtual machine runs. In this case, the host name part of the URI should be empty, or it should specify the address of the local host. If you use the virtual machine as a client, the URI identifies the remote system on the network.' - ' - C(endpoint) (str): Required when I(backing_type=pipe). When you use serial port pipe backing to connect a virtual machine to another process, you must define the endpoints.' - ' - C(no_rx_loss) (bool): Required when I(backing_type=pipe). Enables optimized data transfer over the pipe. - choices: - client - server' - ' - C(pipe_name) (str): Required when I(backing_type=pipe).' 
- ' - C(device_name) (str): Required when I(backing_type=device).' - ' - C(file_path) (str): Required when I(backing_type=file). File path for the host file used in this backing. Fully qualified path is required, like <datastore_name>/<file_name>' extends_documentation_fragment: - vmware.documentation author: - Anusha Hegde (@anusha94) ''' EXAMPLES = ''' # Create serial ports - name: Create multiple serial ports with Backing type - network, pipe, device and file vmware_guest_serial_port: hostname: "{{ vcenter_hostname }}" username: "{{ vcenter_username }}" password: "{{ vcenter_password }}" validate_certs: no name: "test_vm1" backings: - type: 'network' direction: 'client' service_uri: 'tcp://6000' yield_on_poll: True - type: 'pipe' pipe_name: 'serial_pipe' endpoint: 'client' - type: 'device' device_name: '/dev/char/serial/uart0' - type: 'file' file_path: '[datastore1]/file1' yield_on_poll: True register: create_multiple_ports # Modify existing serial port - name: Modify Network backing type vmware_guest_serial_port: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' name: '{{ name }}' backings: - type: 'network' state: 'present' direction: 'server' service_uri: 'tcp://6000' delegate_to: localhost # Remove serial port - name: Remove pipe backing type vmware_guest_serial_port: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' name: '{{ name }}' backings: - type: 'pipe' state: 'absent' delegate_to: localhost ''' RETURN = r''' serial_port_data: description: metadata about the virtual machine's serial ports after managing them returned: always type: dict sample: [ { "backing_type": "network", "direction": "client", "service_uri": "tcp://6000" }, { "backing_type": "pipe", "direction": "server", "pipe_name": "serial pipe" }, ] ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task from ansible.module_utils._text import to_native try: from pyVmomi import vim except ImportError: pass class PyVmomiHelper(PyVmomi): """ This class is a helper to create easily VMware Spec for PyVmomiHelper """ def __init__(self, module): super(PyVmomiHelper, self).__init__(module) self.change_applied = False # a change was applied meaning at least one task succeeded self.config_spec = vim.vm.ConfigSpec() self.config_spec.deviceChange = [] self.serial_ports = [] def check_vm_state(self, vm_obj): """ To add serial port, the VM must be in powered off state Input: - vm: Virtual Machine Output: - True if vm is in poweredOff state - module fails otherwise """ if vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOff: return True else: self.module.fail_json(msg="A serial device cannot be added to a VM in the current state(" + vm_obj.runtime.powerState + ")." 
+ "Please use the vmware_guest_powerstate module to power off the VM") def get_serial_port_config_spec(self, vm_obj): """ Variables changed: - self.config_spec - self.change_applied """ # create serial config spec for adding, editing, removing for backing in self.params.get('backings'): backing_keys = backing.keys() serial_port = get_serial_port(vm_obj, backing) if serial_port is None and 'state' not in backing_keys: # if serial port is None and state is not mentioned # create a new serial port serial_port_spec = self.create_serial_port(backing) serial_port_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add self.serial_ports.append(serial_port_spec) self.change_applied = True else: if serial_port is not None and 'state' in backing_keys: serial_spec = vim.vm.device.VirtualDeviceSpec() serial_spec.device = serial_port if backing['state'].lower() == 'present': # modify existing serial port serial_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit serial_spec.device.backing = self.get_backing_info(serial_port, backing, backing['type']) self.change_applied = True self.config_spec.deviceChange.append(serial_spec) elif backing['state'].lower() == 'absent': # remove serial port serial_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove self.change_applied = True self.config_spec.deviceChange.append(serial_spec) else: self.module.fail_json(msg='Unable to find the specified serial port: %s' % backing) def reconfigure_vm_serial_port(self, vm_obj): """ Reconfigure vm with new or modified serial port config spec """ self.get_serial_port_config_spec(vm_obj) try: # configure create tasks first if self.serial_ports: for serial_port in self.serial_ports: # each type of serial port is of config_spec.device = vim.vm.device.VirtualSerialPort() object type # because serial ports differ in the backing types and config_spec.device has to be unique, # we are creating a new spec for every create port configuration spec = vim.vm.ConfigSpec() spec.deviceChange.append(serial_port) task = vm_obj.ReconfigVM_Task(spec=spec) wait_for_task(task) task = vm_obj.ReconfigVM_Task(spec=self.config_spec) wait_for_task(task) except vim.fault.InvalidDatastorePath as e: self.module.fail_json(msg="Failed to configure serial port on given virtual machine due to invalid path: %s" % to_native(e.msg)) except vim.fault.RestrictedVersion as e: self.module.fail_json(msg="Failed to reconfigure virtual machine due to product versioning restrictions: %s" % to_native(e.msg)) if task.info.state == 'error': results = {'changed': self.change_applied, 'failed': True, 'msg': task.info.error.msg} else: serial_port_info = get_serial_port_info(vm_obj) results = {'changed': self.change_applied, 'failed': False, 'serial_port_info': serial_port_info} return results def set_network_backing(self, serial_port, backing_info): """ Set the networking backing params """ required_params = ['service_uri', 'direction'] if set(required_params).issubset(backing_info.keys()): backing = serial_port.URIBackingInfo() backing.serviceURI = backing_info['service_uri'] backing.direction = backing_info['direction'] else: self.module.fail_json(msg="Failed to create a new serial port of network backing type due to insufficient parameters." 
+ "The required parameters are service_uri and direction") return backing def set_pipe_backing(self, serial_port, backing_info): """ Set the pipe backing params """ required_params = ['pipe_name', 'endpoint'] if set(required_params).issubset(backing_info.keys()): backing = serial_port.PipeBackingInfo() backing.pipeName = backing_info['pipe_name'] backing.endpoint = backing_info['endpoint'] else: self.module.fail_json(msg="Failed to create a new serial port of pipe backing type due to insufficient parameters." + "The required parameters are pipe_name and endpoint") # since no_rx_loss is an optional argument, so check if the key is present if 'no_rx_loss' in backing_info.keys() and backing_info['no_rx_loss']: backing.noRxLoss = backing_info['no_rx_loss'] return backing def set_device_backing(self, serial_port, backing_info): """ Set the device backing params """ required_params = ['device_name'] if set(required_params).issubset(backing_info.keys()): backing = serial_port.DeviceBackingInfo() backing.deviceName = backing_info['device_name'] else: self.module.fail_json(msg="Failed to create a new serial port of device backing type due to insufficient parameters." + "The required parameters are device_name") return backing def set_file_backing(self, serial_port, backing_info): """ Set the file backing params """ required_params = ['file_path'] if set(required_params).issubset(backing_info.keys()): backing = serial_port.FileBackingInfo() backing.fileName = backing_info['file_path'] else: self.module.fail_json(msg="Failed to create a new serial port of file backing type due to insufficient parameters." + "The required parameters are file_path") return backing def get_backing_info(self, serial_port, backing, backing_type): """ Returns the call to the appropriate backing function based on the backing type """ switcher = { "network": self.set_network_backing, "pipe": self.set_pipe_backing, "device": self.set_device_backing, "file": self.set_file_backing } backing_func = switcher.get(backing_type, "Invalid Backing Info") return backing_func(serial_port, backing) def create_serial_port(self, backing): """ Create a new serial port """ serial_spec = vim.vm.device.VirtualDeviceSpec() serial_port = vim.vm.device.VirtualSerialPort() serial_port.yieldOnPoll = backing['yield_on_poll'] if 'yield_on_poll' in backing.keys() else True serial_port.backing = self.get_backing_info(serial_port, backing, backing['type']) serial_spec.device = serial_port return serial_spec def get_serial_port(vm_obj, backing): """ Return the serial port of specified backing type """ serial_port = None backing_type_mapping = { 'network': vim.vm.device.VirtualSerialPort.URIBackingInfo, 'pipe': vim.vm.device.VirtualSerialPort.PipeBackingInfo, 'device': vim.vm.device.VirtualSerialPort.DeviceBackingInfo, 'file': vim.vm.device.VirtualSerialPort.FileBackingInfo } valid_params = backing.keys() for device in vm_obj.config.hardware.device: if isinstance(device, vim.vm.device.VirtualSerialPort): if isinstance(device.backing, backing_type_mapping[backing['type']]): if 'service_uri' in valid_params: # network backing type if device.backing.serviceURI == backing['service_uri']: serial_port = device break elif 'pipe_name' in valid_params: # named pipe backing type if device.backing.pipeName == backing['pipe_name']: serial_port = device break elif 'device_name' in valid_params: # physical serial device backing type if device.backing.deviceName == backing['device_name']: serial_port = device break elif 'file_path' in valid_params: # file backing type 
if device.backing.fileName == backing['file_path']: serial_port = device break # if there is a backing of only one type, user need not provide secondary details like service_uri, pipe_name, device_name or file_path # we will match the serial port with backing type only # in this case, the last matching serial port will be returned serial_port = device return serial_port def get_serial_port_info(vm_obj): """ Get the serial port info """ serial_port_info = [] if vm_obj is None: return serial_port_info for port in vm_obj.config.hardware.device: backing = dict() if isinstance(port, vim.vm.device.VirtualSerialPort): if isinstance(port.backing, vim.vm.device.VirtualSerialPort.URIBackingInfo): backing['backing_type'] = 'network' backing['direction'] = port.backing.direction backing['service_uri'] = port.backing.serviceURI elif isinstance(port.backing, vim.vm.device.VirtualSerialPort.PipeBackingInfo): backing['backing_type'] = 'pipe' backing['pipe_name'] = port.backing.pipeName backing['endpoint'] = port.backing.endpoint backing['no_rx_loss'] = port.backing.noRxLoss elif isinstance(port.backing, vim.vm.device.VirtualSerialPort.DeviceBackingInfo): backing['backing_type'] = 'device' backing['device_name'] = port.backing.deviceName elif isinstance(port.backing, vim.vm.device.VirtualSerialPort.FileBackingInfo): backing['backing_type'] = 'file' backing['file_path'] = port.backing.fileName else: continue serial_port_info.append(backing) return serial_port_info def main(): """ Main method """ argument_spec = vmware_argument_spec() argument_spec.update( name=dict(type='str'), uuid=dict(type='str'), moid=dict(type='str'), use_instance_uuid=dict(type='bool', default=False), backings=dict(type='list', default=[]) ) module = AnsibleModule( argument_spec=argument_spec, required_one_of=[ ['name', 'uuid', 'moid'] ], mutually_exclusive=[ ['name', 'uuid', 'moid'] ], ) result = {'failed': False, 'changed': False} pyv = PyVmomiHelper(module) # Check if the VM exists before continuing vm_obj = pyv.get_vm() if vm_obj: proceed = pyv.check_vm_state(vm_obj) if proceed: result = pyv.reconfigure_vm_serial_port(vm_obj) else: # We are unable to find the virtual machine user specified # Bail out vm_id = (module.params.get('name') or module.params.get('uuid') or module.params.get('vm_id')) module.fail_json(msg="Unable to manage serial ports for non-existing" " virtual machine '%s'." % vm_id) if result['failed']: module.fail_json(**result) else: module.exit_json(**result) if __name__ == '__main__': main()
roadmapper/ansible
lib/ansible/modules/cloud/vmware/vmware_guest_serial_port.py
Python
gpl-3.0
19,614
# Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core import urlresolvers from django.views.decorators import csrf from django.conf.urls import patterns def _patch_pattern(regex_pattern): """ Patch pattern callback using csrf_exempt. Enforce RegexURLPattern callback to get resolved if required. """ regex_pattern._callback = \ csrf.csrf_exempt(regex_pattern.callback) def _patch_resolver(r): """ Patch all patterns found in resolver with _patch_pattern """ if hasattr(r, 'url_patterns'): entries = r.url_patterns else: # first level view in patterns ? entries = [r] for entry in entries: if isinstance(entry, urlresolvers.RegexURLResolver): _patch_resolver(entry) #if isinstance(entry, urlresolvers.RegexURLPattern): # let it break... else: _patch_pattern(entry) def api_patterns(*args, **kwargs): """ Protect all url patterns from csrf attacks. """ _patterns = patterns(*args, **kwargs) for entry in _patterns: _patch_resolver(entry) return _patterns
allmende/synnefo
snf-django-lib/snf_django/lib/api/urls.py
Python
gpl-3.0
1,762
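# Editor's note: api_patterns() above operates on Django's RegexURLResolver /
# RegexURLPattern objects. The standalone sketch below uses two tiny stand-in
# classes (not Django) purely to illustrate the recursive idea the module relies
# on: walk a tree of resolvers and wrap every leaf pattern's callback with a
# decorator such as csrf_exempt.
def fake_csrf_exempt(view):
    """Stand-in decorator: tag the callable instead of changing CSRF behaviour."""
    view.csrf_exempt = True
    return view


class FakePattern(object):
    def __init__(self, callback):
        self.callback = callback


class FakeResolver(object):
    def __init__(self, url_patterns):
        self.url_patterns = url_patterns


def patch_tree(entry, decorator):
    """Recursively apply `decorator` to every pattern callback under `entry`."""
    if hasattr(entry, 'url_patterns'):
        for child in entry.url_patterns:
            patch_tree(child, decorator)
    else:
        entry.callback = decorator(entry.callback)


if __name__ == '__main__':
    leaf = FakePattern(lambda request: 'ok')
    tree = FakeResolver([FakeResolver([leaf]), FakePattern(lambda request: 'ok2')])
    patch_tree(tree, fake_csrf_exempt)
    print(leaf.callback.csrf_exempt)  # True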
# Copyright 2013 Mirantis Inc. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Eugene Nikanorov, Mirantis Inc. # import sys from neutronclient.neutron.v2_0 import servicetype from neutronclient.tests.unit import test_cli20 class CLITestV20ServiceProvidersJSON(test_cli20.CLITestV20Base): id_field = "name" def setUp(self): super(CLITestV20ServiceProvidersJSON, self).setUp( plurals={'tags': 'tag'} ) def test_list_service_providers(self): resources = "service_providers" cmd = servicetype.ListServiceProvider(test_cli20.MyApp(sys.stdout), None) self._test_list_resources(resources, cmd, True) def test_list_service_providers_pagination(self): resources = "service_providers" cmd = servicetype.ListServiceProvider(test_cli20.MyApp(sys.stdout), None) self._test_list_resources_with_pagination(resources, cmd) def test_list_service_providers_sort(self): resources = "service_providers" cmd = servicetype.ListServiceProvider(test_cli20.MyApp(sys.stdout), None) self._test_list_resources(resources, cmd, sort_key=["name"], sort_dir=["asc", "desc"]) def test_list_service_providers_limit(self): resources = "service_providers" cmd = servicetype.ListServiceProvider(test_cli20.MyApp(sys.stdout), None) self._test_list_resources(resources, cmd, page_size=1000) class CLITestV20ServiceProvidersXML(CLITestV20ServiceProvidersJSON): format = 'xml'
vijayendrabvs/ssl-python-neutronclient
neutronclient/tests/unit/test_cli20_servicetype.py
Python
apache-2.0
2,313
# stdlib from collections import defaultdict import time # 3p import psutil # project from checks import AgentCheck from config import _is_affirmative from utils.platform import Platform DEFAULT_AD_CACHE_DURATION = 120 DEFAULT_PID_CACHE_DURATION = 120 ATTR_TO_METRIC = { 'thr': 'threads', 'cpu': 'cpu.pct', 'rss': 'mem.rss', 'vms': 'mem.vms', 'real': 'mem.real', 'open_fd': 'open_file_descriptors', 'r_count': 'ioread_count', # FIXME: namespace me correctly (6.x), io.r_count 'w_count': 'iowrite_count', # FIXME: namespace me correctly (6.x) io.r_bytes 'r_bytes': 'ioread_bytes', # FIXME: namespace me correctly (6.x) io.w_count 'w_bytes': 'iowrite_bytes', # FIXME: namespace me correctly (6.x) io.w_bytes 'ctx_swtch_vol': 'voluntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.voluntary 'ctx_swtch_invol': 'involuntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.involuntary } class ProcessCheck(AgentCheck): def __init__(self, name, init_config, agentConfig, instances=None): AgentCheck.__init__(self, name, init_config, agentConfig, instances) # ad stands for access denied # We cache the PIDs getting this error and don't iterate on them # more often than `access_denied_cache_duration` # This cache is for all PIDs so it's global, but it should # be refreshed by instance self.last_ad_cache_ts = {} self.ad_cache = set() self.access_denied_cache_duration = int( init_config.get( 'access_denied_cache_duration', DEFAULT_AD_CACHE_DURATION ) ) # By default cache the PID list for a while # Sometimes it's not wanted b/c it can mess with no-data monitoring # This cache is indexed per instance self.last_pid_cache_ts = {} self.pid_cache = {} self.pid_cache_duration = int( init_config.get( 'pid_cache_duration', DEFAULT_PID_CACHE_DURATION ) ) # Process cache, indexed by instance self.process_cache = defaultdict(dict) def should_refresh_ad_cache(self, name): now = time.time() return now - self.last_ad_cache_ts.get(name, 0) > self.access_denied_cache_duration def should_refresh_pid_cache(self, name): now = time.time() return now - self.last_pid_cache_ts.get(name, 0) > self.pid_cache_duration def find_pids(self, name, search_string, exact_match, ignore_ad=True): """ Create a set of pids of selected processes. 
Search for search_string """ if not self.should_refresh_pid_cache(name): return self.pid_cache[name] ad_error_logger = self.log.debug if not ignore_ad: ad_error_logger = self.log.error refresh_ad_cache = self.should_refresh_ad_cache(name) matching_pids = set() for proc in psutil.process_iter(): # Skip access denied processes if not refresh_ad_cache and proc.pid in self.ad_cache: continue found = False for string in search_string: try: # FIXME 6.x: All has been deprecated from the doc, should be removed if string == 'All': found = True if exact_match: if proc.name() == string: found = True else: cmdline = proc.cmdline() if string in ' '.join(cmdline): found = True except psutil.NoSuchProcess: self.log.warning('Process disappeared while scanning') except psutil.AccessDenied, e: ad_error_logger('Access denied to process with PID %s', proc.pid) ad_error_logger('Error: %s', e) if refresh_ad_cache: self.ad_cache.add(proc.pid) if not ignore_ad: raise else: if refresh_ad_cache: self.ad_cache.discard(proc.pid) if found: matching_pids.add(proc.pid) break self.pid_cache[name] = matching_pids self.last_pid_cache_ts[name] = time.time() if refresh_ad_cache: self.last_ad_cache_ts[name] = time.time() return matching_pids def psutil_wrapper(self, process, method, accessors, *args, **kwargs): """ A psutil wrapper that is calling * psutil.method(*args, **kwargs) and returns the result OR * psutil.method(*args, **kwargs).accessor[i] for each accessors given in a list, the result being indexed in a dictionary by the accessor name """ if accessors is None: result = None else: result = {} # Ban certain method that we know fail if method == 'memory_info_ex'\ and (Platform.is_win32() or Platform.is_solaris()): return result elif method == 'num_fds' and not Platform.is_unix(): return result try: res = getattr(process, method)(*args, **kwargs) if accessors is None: result = res else: for acc in accessors: try: result[acc] = getattr(res, acc) except AttributeError: self.log.debug("psutil.%s().%s attribute does not exist", method, acc) except (NotImplementedError, AttributeError): self.log.debug("psutil method %s not implemented", method) except psutil.AccessDenied: self.log.debug("psutil was denied acccess for method %s", method) except psutil.NoSuchProcess: self.warning("Process {0} disappeared while scanning".format(process.pid)) return result def get_process_state(self, name, pids): st = defaultdict(list) # Remove from cache the processes that are not in `pids` cached_pids = set(self.process_cache[name].keys()) pids_to_remove = cached_pids - pids for pid in pids_to_remove: del self.process_cache[name][pid] for pid in pids: st['pids'].append(pid) new_process = False # If the pid's process is not cached, retrieve it if pid not in self.process_cache[name] or not self.process_cache[name][pid].is_running(): new_process = True try: self.process_cache[name][pid] = psutil.Process(pid) self.log.debug('New process in cache: %s' % pid) # Skip processes dead in the meantime except psutil.NoSuchProcess: self.warning('Process %s disappeared while scanning' % pid) # reset the PID cache now, something changed self.last_pid_cache_ts[name] = 0 continue p = self.process_cache[name][pid] meminfo = self.psutil_wrapper(p, 'memory_info', ['rss', 'vms']) st['rss'].append(meminfo.get('rss')) st['vms'].append(meminfo.get('vms')) # will fail on win32 and solaris shared_mem = self.psutil_wrapper(p, 'memory_info_ex', ['shared']).get('shared') if shared_mem is not None and meminfo.get('rss') is not None: st['real'].append(meminfo['rss'] - 
shared_mem) else: st['real'].append(None) ctxinfo = self.psutil_wrapper(p, 'num_ctx_switches', ['voluntary', 'involuntary']) st['ctx_swtch_vol'].append(ctxinfo.get('voluntary')) st['ctx_swtch_invol'].append(ctxinfo.get('involuntary')) st['thr'].append(self.psutil_wrapper(p, 'num_threads', None)) cpu_percent = self.psutil_wrapper(p, 'cpu_percent', None) if not new_process: # psutil returns `0.` for `cpu_percent` the first time it's sampled on a process, # so save the value only on non-new processes st['cpu'].append(cpu_percent) st['open_fd'].append(self.psutil_wrapper(p, 'num_fds', None)) ioinfo = self.psutil_wrapper(p, 'io_counters', ['read_count', 'write_count', 'read_bytes', 'write_bytes']) st['r_count'].append(ioinfo.get('read_count')) st['w_count'].append(ioinfo.get('write_count')) st['r_bytes'].append(ioinfo.get('read_bytes')) st['w_bytes'].append(ioinfo.get('write_bytes')) return st def check(self, instance): name = instance.get('name', None) tags = instance.get('tags', []) exact_match = _is_affirmative(instance.get('exact_match', True)) search_string = instance.get('search_string', None) ignore_ad = _is_affirmative(instance.get('ignore_denied_access', True)) if not isinstance(search_string, list): raise KeyError('"search_string" parameter should be a list') # FIXME 6.x remove me if "All" in search_string: self.warning('Deprecated: Having "All" in your search_string will' 'greatly reduce the performance of the check and ' 'will be removed in a future version of the agent.') if name is None: raise KeyError('The "name" of process groups is mandatory') if search_string is None: raise KeyError('The "search_string" is mandatory') pids = self.find_pids( name, search_string, exact_match, ignore_ad=ignore_ad ) proc_state = self.get_process_state(name, pids) # FIXME 6.x remove the `name` tag tags.extend(['process_name:%s' % name, name]) self.log.debug('ProcessCheck: process %s analysed', name) self.gauge('system.processes.number', len(pids), tags=tags) for attr, mname in ATTR_TO_METRIC.iteritems(): vals = [x for x in proc_state[attr] if x is not None] # skip [] if vals: # FIXME 6.x: change this prefix? self.gauge('system.processes.%s' % mname, sum(vals), tags=tags) self._process_service_check(name, len(pids), instance.get('thresholds', None)) def _process_service_check(self, name, nb_procs, bounds): ''' Report a service check, for each process in search_string. Report as OK if the process is in the warning thresholds CRITICAL out of the critical thresholds WARNING out of the warning thresholds ''' tag = ["process:%s" % name] status = AgentCheck.OK message_str = "PROCS %s: %s processes found for %s" status_str = { AgentCheck.OK: "OK", AgentCheck.WARNING: "WARNING", AgentCheck.CRITICAL: "CRITICAL" } if not bounds and nb_procs < 1: status = AgentCheck.CRITICAL elif bounds: warning = bounds.get('warning', [1, float('inf')]) critical = bounds.get('critical', [1, float('inf')]) if warning[1] < nb_procs or nb_procs < warning[0]: status = AgentCheck.WARNING if critical[1] < nb_procs or nb_procs < critical[0]: status = AgentCheck.CRITICAL self.service_check( "process.up", status, tags=tag, message=message_str % (status_str[status], nb_procs, name) )
pfmooney/dd-agent
checks.d/process.py
Python
bsd-3-clause
12,036
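# Editor's note: the process check above funnels every psutil call through
# psutil_wrapper(), which optionally pulls named attributes off the result and
# tolerates fields that are missing on some platforms. The sketch below shows just
# that accessor-extraction pattern on a plain object, with no psutil dependency;
# the method and attribute names are invented.
from collections import namedtuple


def extract_accessors(obj, method, accessors, *args, **kwargs):
    """Call obj.method(*args, **kwargs) and return {accessor: value} for each field.

    Missing attributes are simply skipped, mirroring how the check tolerates
    platform-specific fields that psutil does not always provide.
    """
    result = {}
    try:
        res = getattr(obj, method)(*args, **kwargs)
    except (AttributeError, NotImplementedError):
        return result
    if accessors is None:
        return res
    for acc in accessors:
        try:
            result[acc] = getattr(res, acc)
        except AttributeError:
            pass
    return result


if __name__ == '__main__':
    MemInfo = namedtuple('MemInfo', ['rss', 'vms'])

    class FakeProcess(object):
        def memory_info(self):
            return MemInfo(rss=1024, vms=4096)

    proc = FakeProcess()
    print(extract_accessors(proc, 'memory_info', ['rss', 'vms', 'shared']))
    # -> {'rss': 1024, 'vms': 4096}   ('shared' is skipped because it is missing)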
try: from astropy.models import ParametricModel,Parameter,_convert_input,_convert_output import numpy as np class PowerLawModel(ParametricModel): param_names = ['scale', 'alpha'] def __init__(self, scale, alpha, param_dim=1): self._scale = Parameter(name='scale', val=scale, mclass=self, param_dim=param_dim) self._alpha = Parameter(name='alpha', val=alpha, mclass=self, param_dim=param_dim) super(ParametricModel,self).__init__(self, self.param_names, ndim=1, outdim=1, param_dim=param_dim) self.linear = False self.deriv = None def eval(self, xvals, params): return params[0]*((xvals)**(-params[1])) def noderiv(self, params, xvals, yvals): deriv_dict = { 'scale': ((xvals)**(-params[1])), 'alpha': params[0]*((xvals)**(-params[1]))*np.log(xvals)} derivval = [deriv_dict[par] for par in self.param_names] return np.array(derivval).T def __call__(self, x): """ Transforms data using this model. Parameters -------------- x : array, of minimum dimensions 1 Notes ----- See the module docstring for rules for model evaluation. """ x, fmt = _convert_input(x, self.param_dim) result = self.eval(x, self.param_sets) return _convert_output(result, fmt) except ImportError: pass
vlas-sokolov/pyspeckit
pyspeckit/spectrum/models/astropy_models.py
Python
mit
1,575
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for experimental iterator_ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.data.python.ops import iterator_ops from tensorflow.python.data.ops import dataset_ops from tensorflow.python.estimator import estimator from tensorflow.python.estimator import model_fn from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import saver as saver_lib from tensorflow.python.training import training_util class CheckpointInputPipelineHookTest(test.TestCase): @staticmethod def _model_fn(features, labels, mode, config): del labels del mode del config global_step = training_util.get_or_create_global_step() update_global_step_op = global_step.assign_add(1) latest_feature = variables.Variable( 0, name='latest_feature', dtype=dtypes.int64) store_latest_feature_op = latest_feature.assign(features) ops.add_to_collection('my_vars', global_step) ops.add_to_collection('my_vars', latest_feature) return model_fn.EstimatorSpec( mode='train', train_op=control_flow_ops.group( [update_global_step_op, store_latest_feature_op]), loss=constant_op.constant(2.0)) def _read_vars(self, model_dir): """Returns (global_step, latest_feature).""" with ops.Graph().as_default() as g: ckpt_path = saver_lib.latest_checkpoint(model_dir) meta_filename = ckpt_path + '.meta' saver_lib.import_meta_graph(meta_filename) saver = saver_lib.Saver() with self.test_session(graph=g) as sess: saver.restore(sess, ckpt_path) return sess.run(ops.get_collection('my_vars')) def _build_iterator_saver_hook(self, est): return iterator_ops.CheckpointInputPipelineHook(est) def testReturnDatasetFromInputFn(self): def _input_fn(): return dataset_ops.Dataset.range(10) est = estimator.Estimator(model_fn=self._model_fn) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1)) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3)) def testBuildIteratorInInputFn(self): def _input_fn(): ds = dataset_ops.Dataset.range(10) iterator = ds.make_one_shot_iterator() return iterator.get_next() est = estimator.Estimator(model_fn=self._model_fn) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1)) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3)) def testDoNotRestore(self): def _input_fn(): return 
dataset_ops.Dataset.range(10) est = estimator.Estimator(model_fn=self._model_fn) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1)) est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3)) # Hook not provided, input pipeline was not restored. est.train(_input_fn, steps=2) self.assertSequenceEqual(self._read_vars(est.model_dir), (6, 1)) def testRaiseErrorIfNoIterator(self): def _input_fn(): return constant_op.constant(1, dtype=dtypes.int64) est = estimator.Estimator(model_fn=self._model_fn) with self.assertRaises(ValueError): est.train( _input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)]) if __name__ == '__main__': test.main()
yanchen036/tensorflow
tensorflow/contrib/data/python/ops/iterator_ops_test.py
Python
apache-2.0
4,700
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import code import cpp_util from model import Platforms from schema_util import CapitalizeFirstLetter from schema_util import JsFunctionNameToClassName import json import os import re def _RemoveDescriptions(node): """Returns a copy of |schema| with "description" fields removed. """ if isinstance(node, dict): result = {} for key, value in node.items(): # Some schemas actually have properties called "description", so only # remove descriptions that have string values. if key == 'description' and isinstance(value, basestring): continue result[key] = _RemoveDescriptions(value) return result if isinstance(node, list): return [_RemoveDescriptions(v) for v in node] return node class CppBundleGenerator(object): """This class contains methods to generate code based on multiple schemas. """ def __init__(self, root, model, api_defs, cpp_type_generator, cpp_namespace, source_file_dir, impl_dir): self._root = root self._model = model self._api_defs = api_defs self._cpp_type_generator = cpp_type_generator self._cpp_namespace = cpp_namespace self._source_file_dir = source_file_dir self._impl_dir = impl_dir self.api_cc_generator = _APICCGenerator(self) self.api_h_generator = _APIHGenerator(self) self.schemas_cc_generator = _SchemasCCGenerator(self) self.schemas_h_generator = _SchemasHGenerator(self) def _GenerateHeader(self, file_base, body_code): """Generates a code.Code object for a header file Parameters: - |file_base| - the base of the filename, e.g. 'foo' (for 'foo.h') - |body_code| - the code to put in between the multiple inclusion guards""" c = code.Code() c.Append(cpp_util.CHROMIUM_LICENSE) c.Append() c.Append(cpp_util.GENERATED_BUNDLE_FILE_MESSAGE % self._source_file_dir) ifndef_name = cpp_util.GenerateIfndefName(self._source_file_dir, file_base) c.Append() c.Append('#ifndef %s' % ifndef_name) c.Append('#define %s' % ifndef_name) c.Append() c.Concat(body_code) c.Append() c.Append('#endif // %s' % ifndef_name) c.Append() return c def _GetPlatformIfdefs(self, model_object): """Generates the "defined" conditional for an #if check if |model_object| has platform restrictions. Returns None if there are no restrictions. 
""" if model_object.platforms is None: return None ifdefs = [] for platform in model_object.platforms: if platform == Platforms.CHROMEOS: ifdefs.append('defined(OS_CHROMEOS)') elif platform == Platforms.LINUX: ifdefs.append('defined(OS_LINUX)') elif platform == Platforms.MAC: ifdefs.append('defined(OS_MACOSX)') elif platform == Platforms.WIN: ifdefs.append('defined(OS_WIN)') else: raise ValueError("Unsupported platform ifdef: %s" % platform.name) return ' || '.join(ifdefs) def _GenerateRegisterFunctions(self, namespace_name, function): c = code.Code() function_ifdefs = self._GetPlatformIfdefs(function) if function_ifdefs is not None: c.Append("#if %s" % function_ifdefs, indent_level=0) function_name = JsFunctionNameToClassName(namespace_name, function.name) c.Append("registry->RegisterFunction<%sFunction>();" % ( function_name)) if function_ifdefs is not None: c.Append("#endif // %s" % function_ifdefs, indent_level=0) return c def _GenerateFunctionRegistryRegisterAll(self): c = code.Code() c.Append('// static') c.Sblock('void GeneratedFunctionRegistry::RegisterAll(' 'ExtensionFunctionRegistry* registry) {') for namespace in self._model.namespaces.values(): namespace_ifdefs = self._GetPlatformIfdefs(namespace) if namespace_ifdefs is not None: c.Append("#if %s" % namespace_ifdefs, indent_level=0) namespace_name = CapitalizeFirstLetter(namespace.name.replace( "experimental.", "")) for function in namespace.functions.values(): if function.nocompile: continue c.Concat(self._GenerateRegisterFunctions(namespace.name, function)) for type_ in namespace.types.values(): for function in type_.functions.values(): if function.nocompile: continue namespace_types_name = JsFunctionNameToClassName( namespace.name, type_.name) c.Concat(self._GenerateRegisterFunctions(namespace_types_name, function)) if namespace_ifdefs is not None: c.Append("#endif // %s" % namespace_ifdefs, indent_level=0) c.Eblock("}") return c class _APIHGenerator(object): """Generates the header for API registration / declaration""" def __init__(self, cpp_bundle): self._bundle = cpp_bundle def Generate(self, namespace): c = code.Code() c.Append('#include <string>') c.Append() c.Append('#include "base/basictypes.h"') c.Append() c.Append("class ExtensionFunctionRegistry;") c.Append() c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace)) c.Append() c.Append('class GeneratedFunctionRegistry {') c.Sblock(' public:') c.Append('static void RegisterAll(' 'ExtensionFunctionRegistry* registry);') c.Eblock('};') c.Append() c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace)) return self._bundle._GenerateHeader('generated_api', c) class _APICCGenerator(object): """Generates a code.Code object for the generated API .cc file""" def __init__(self, cpp_bundle): self._bundle = cpp_bundle def Generate(self, namespace): c = code.Code() c.Append(cpp_util.CHROMIUM_LICENSE) c.Append() c.Append('#include "%s"' % (os.path.join(self._bundle._source_file_dir, 'generated_api.h'))) c.Append() for namespace in self._bundle._model.namespaces.values(): namespace_name = namespace.unix_name.replace("experimental_", "") implementation_header = namespace.compiler_options.get( "implemented_in", "%s/%s/%s_api.h" % (self._bundle._impl_dir, namespace_name, namespace_name)) if not os.path.exists( os.path.join(self._bundle._root, os.path.normpath(implementation_header))): if "implemented_in" in namespace.compiler_options: raise ValueError('Header file for namespace "%s" specified in ' 'compiler_options not found: %s' % (namespace.unix_name, 
implementation_header)) continue ifdefs = self._bundle._GetPlatformIfdefs(namespace) if ifdefs is not None: c.Append("#if %s" % ifdefs, indent_level=0) c.Append('#include "%s"' % implementation_header) if ifdefs is not None: c.Append("#endif // %s" % ifdefs, indent_level=0) c.Append() c.Append('#include ' '"extensions/browser/extension_function_registry.h"') c.Append() c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace)) c.Append() c.Concat(self._bundle._GenerateFunctionRegistryRegisterAll()) c.Append() c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace)) c.Append() return c class _SchemasHGenerator(object): """Generates a code.Code object for the generated schemas .h file""" def __init__(self, cpp_bundle): self._bundle = cpp_bundle def Generate(self, namespace): c = code.Code() c.Append('#include <map>') c.Append('#include <string>') c.Append() c.Append('#include "base/strings/string_piece.h"') c.Append() c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace)) c.Append() c.Append('class GeneratedSchemas {') c.Sblock(' public:') c.Append('// Determines if schema named |name| is generated.') c.Append('static bool IsGenerated(std::string name);') c.Append() c.Append('// Gets the API schema named |name|.') c.Append('static base::StringPiece Get(const std::string& name);') c.Eblock('};') c.Append() c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace)) return self._bundle._GenerateHeader('generated_schemas', c) def _FormatNameAsConstant(name): """Formats a name to be a C++ constant of the form kConstantName""" name = '%s%s' % (name[0].upper(), name[1:]) return 'k%s' % re.sub('_[a-z]', lambda m: m.group(0)[1].upper(), name.replace('.', '_')) class _SchemasCCGenerator(object): """Generates a code.Code object for the generated schemas .cc file""" def __init__(self, cpp_bundle): self._bundle = cpp_bundle def Generate(self, namespace): c = code.Code() c.Append(cpp_util.CHROMIUM_LICENSE) c.Append() c.Append('#include "%s"' % (os.path.join(self._bundle._source_file_dir, 'generated_schemas.h'))) c.Append() c.Append('#include "base/lazy_instance.h"') c.Append() c.Append('namespace {') for api in self._bundle._api_defs: namespace = self._bundle._model.namespaces[api.get('namespace')] # JSON parsing code expects lists of schemas, so dump a singleton list. json_content = json.dumps([_RemoveDescriptions(api)], separators=(',', ':')) # Escape all double-quotes and backslashes. For this to output a valid # JSON C string, we need to escape \ and ". Note that some schemas are # too large to compile on windows. Split the JSON up into several # strings, since apparently that helps. max_length = 8192 segments = [json_content[i:i + max_length].replace('\\', '\\\\') .replace('"', '\\"') for i in xrange(0, len(json_content), max_length)] c.Append('const char %s[] = "%s";' % (_FormatNameAsConstant(namespace.name), '" "'.join(segments))) c.Append('}') c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace)) c.Append() c.Sblock('struct Static {') c.Sblock('Static() {') for api in self._bundle._api_defs: namespace = self._bundle._model.namespaces[api.get('namespace')] c.Append('schemas["%s"] = %s;' % (namespace.name, _FormatNameAsConstant(namespace.name))) c.Eblock('}') c.Append() c.Append('std::map<std::string, const char*> schemas;') c.Eblock('};') c.Append() c.Append('base::LazyInstance<Static> g_lazy_instance;') c.Append() c.Append('// static') c.Sblock('base::StringPiece GeneratedSchemas::Get(' 'const std::string& name) {') c.Append('return IsGenerated(name) ? 
' 'g_lazy_instance.Get().schemas[name] : "";') c.Eblock('}') c.Append() c.Append('// static') c.Sblock('bool GeneratedSchemas::IsGenerated(std::string name) {') c.Append('return g_lazy_instance.Get().schemas.count(name) > 0;') c.Eblock('}') c.Append() c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace)) c.Append() return c
boundarydevices/android_external_chromium_org
tools/json_schema_compiler/cpp_bundle_generator.py
Python
bsd-3-clause
11,513
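# Editor's note: _SchemasCCGenerator above escapes a JSON blob and splits it into
# 8192-character pieces so the compiler can consume it as adjacent C string
# literals. The sketch below reproduces only that escape-and-split step as a
# standalone function (the segment size comes from the code above; the sample
# schema and constant name are invented).
import json


def json_to_c_literal(obj, max_length=8192):
    """Return a C/C++ string-literal expression holding `obj` as escaped JSON."""
    text = json.dumps(obj, separators=(',', ':'))
    segments = [
        text[i:i + max_length].replace('\\', '\\\\').replace('"', '\\"')
        for i in range(0, len(text), max_length)
    ]
    # Adjacent literals are concatenated by the C/C++ compiler.
    return '"%s"' % '" "'.join(segments)


if __name__ == '__main__':
    schema = [{'namespace': 'example', 'description': 'a "quoted" value'}]
    print('const char kExample[] = %s;' % json_to_c_literal(schema, max_length=16))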
# Copyright Niall Douglas 2005. # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) """ >>> from voidptr_ext import * Check for correct conversion >>> use(get()) Check that None is converted to a NULL void pointer >>> useany(get()) 1 >>> useany(None) 0 Check that we don't lose type information by converting NULL opaque pointers to None >>> assert getnull() is None >>> useany(getnull()) 0 Check that there is no conversion from integers ... >>> try: use(0) ... except TypeError: pass ... else: print 'expected a TypeError' ... and from strings to opaque objects >>> try: use("") ... except TypeError: pass ... else: print 'expected a TypeError' """ def run(args = None): import sys import doctest if args is not None: sys.argv = args return doctest.testmod(sys.modules.get(__name__)) if __name__ == '__main__': print "running..." import sys status = run()[0] if (status == 0): print "Done." sys.exit(status)
alexa-infra/negine
thirdparty/boost-python/libs/python/test/voidptr.py
Python
mit
1,168
# # (c) 2016 Red Hat Inc. # # (c) 2017 Dell EMC. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import re import json from ansible.module_utils._text import to_text, to_bytes from ansible.plugins.terminal import TerminalBase from ansible.errors import AnsibleConnectionFailure class TerminalModule(TerminalBase): terminal_stdout_re = [ re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"), re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$") ] terminal_stderr_re = [ re.compile(br"% ?Bad secret"), re.compile(br"(\bInterface is part of a port-channel\b)"), re.compile(br"(\bThe maximum number of users have already been created\b)|(\bUse '-' for range\b)"), re.compile(br"(?:incomplete|ambiguous) command", re.I), re.compile(br"connection timed out", re.I), re.compile(br"'[^']' +returned error code: ?\d+"), re.compile(br"Invalid|invalid.*$", re.I), re.compile(br"((\bout of range\b)|(\bnot found\b)|(\bCould not\b)|(\bUnable to\b)|(\bCannot\b)|(\bError\b)).*", re.I), re.compile(br"((\balready exists\b)|(\bnot exist\b)|(\bnot active\b)|(\bFailed\b)|(\bIncorrect\b)|(\bnot enabled\b)).*", re.I), ] terminal_initial_prompt = br"\(y/n\)" terminal_initial_answer = b"y" terminal_inital_prompt_newline = False def on_open_shell(self): try: self._exec_cli_command(b'terminal length 0') except AnsibleConnectionFailure: raise AnsibleConnectionFailure('unable to set terminal parameters') def on_become(self, passwd=None): if self._get_prompt().endswith(b'#'): return cmd = {u'command': u'enable'} if passwd: cmd[u'prompt'] = to_text(r"[\r\n]?password:$", errors='surrogate_or_strict') cmd[u'answer'] = passwd try: self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict')) except AnsibleConnectionFailure: raise AnsibleConnectionFailure('unable to elevate privilege to enable mode') # in dellos6 the terminal settings are accepted after the privilege mode try: self._exec_cli_command(b'terminal length 0') except AnsibleConnectionFailure: raise AnsibleConnectionFailure('unable to set terminal parameters') def on_unbecome(self): prompt = self._get_prompt() if prompt is None: # if prompt is None most likely the terminal is hung up at a prompt return if prompt.strip().endswith(b')#'): self._exec_cli_command(b'end') self._exec_cli_command(b'disable') elif prompt.endswith(b'#'): self._exec_cli_command(b'disable')
roadmapper/ansible
lib/ansible/plugins/terminal/dellos6.py
Python
gpl-3.0
3,474
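# Editor's note: the terminal plugin above decides when a command has finished by
# matching device output against terminal_stdout_re. The snippet below compiles the
# same prompt pattern and checks it against a few made-up Dell OS6-style prompts,
# which is a convenient way to sanity-check changes to these regexes.
import re

prompt_re = re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$")

if __name__ == '__main__':
    samples = [
        b"switch1>",
        b"switch1#",
        b"switch1(config)#",
        b"switch1(config-if)# ",
        b"% Invalid input",
    ]
    for sample in samples:
        print(sample, bool(prompt_re.search(sample)))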
class Module: def __init__(self, mainMenu, params=[]): # metadata info about the module, not modified during runtime self.info = { # name for the module that will appear in module menus 'Name': 'Get FileServers', # list of one or more authors for the module 'Author': ['@424f424f'], # more verbose multi-line description of the module 'Description': 'This module will list file servers', # True if the module needs to run in the background 'Background' : False, # File extension to save the file as 'OutputExtension' : "", # if the module needs administrative privileges 'NeedsAdmin' : False, # True if the method doesn't touch disk/is reasonably opsec safe 'OpsecSafe' : True, # the module language 'Language' : 'python', # the minimum language version needed 'MinLanguageVersion' : '2.6', # list of any references/other comments 'Comments': [''] } # any options needed by the module, settable during runtime self.options = { # format: # value_name : {description, required, default_value} 'Agent' : { # The 'Agent' option is the only one that MUST be in a module 'Description' : 'Agent to run on.', 'Required' : True, 'Value' : '' }, 'LDAPAddress' : { # The 'Agent' option is the only one that MUST be in a module 'Description' : 'LDAP IP/Hostname', 'Required' : True, 'Value' : '' }, 'BindDN' : { # The 'Agent' option is the only one that MUST be in a module 'Description' : '[email protected]', 'Required' : True, 'Value' : '' }, 'Password' : { # The 'Agent' option is the only one that MUST be in a module 'Description' : 'Password to connect to LDAP', 'Required' : False, 'Value' : '' } } # save off a copy of the mainMenu object to access external functionality # like listeners/agent handlers/etc. self.mainMenu = mainMenu # During instantiation, any settable option parameters # are passed as an object set to the module and the # options dictionary is automatically set. This is mostly # in case options are passed on the command line if params: for param in params: # parameter format is [Name, Value] option, value = param if option in self.options: self.options[option]['Value'] = value def generate(self, obfuscate=False, obfuscationCommand=""): LDAPAddress = self.options['LDAPAddress']['Value'] BindDN = self.options['BindDN']['Value'] password = self.options['Password']['Value'] # the Python script itself, with the command to invoke # for execution appended to the end. Scripts should output # everything to the pipeline for proper parsing. # # the script should be stripped of comments, with a link to any # original reference script included in the comments. script = """ import sys, os, subprocess, re BindDN = "%s" LDAPAddress = "%s" password = "%s" regex = re.compile('.+@([^.]+)\..+') global tld match = re.match(regex, BindDN) tld = match.group(1) global ext ext = BindDN.split('.')[1] cmd = \"""ldapsearch -x -h {} -b "dc={},dc={}" -D {} -w {} "(&(samAccountType=805306368))" ""\".format(LDAPAddress, tld, ext, BindDN, password) output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, bufsize=1) with output.stdout: print "" for line in iter(output.stdout.readline, b''): if ("homeDirectory" or "scriptPath" or "profilePath") in line: print "Results:" print "" m = re.search(r'([^\]*)', line) if m: print m.group(1) output.wait() print "" """ % (BindDN, LDAPAddress, password) return script
EmpireProject/Empire
lib/modules/python/situational_awareness/network/active_directory/get_fileservers.py
Python
bsd-3-clause
4,433
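# Editor's note: the Empire module above derives the ldapsearch base DN from the
# BindDN with a regex and a split that assume exactly two domain components
# (e.g. [email protected] -> dc=corp,dc=local). The hedged sketch below shows a
# slightly more general way to build the same "-b" value from a UPN-style bind
# account; the sample account names are invented and this is not the module's code.
def ldap_base_from_upn(bind_dn):
    """Turn '[email protected]' into 'dc=a,dc=b,dc=c' for use with ldapsearch -b."""
    domain = bind_dn.split('@', 1)[1]
    return ','.join('dc=%s' % part for part in domain.split('.'))


if __name__ == '__main__':
    print(ldap_base_from_upn('[email protected]'))        # dc=corp,dc=local
    print(ldap_base_from_upn('[email protected]'))   # dc=sub,dc=corp,dc=example,dc=com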
# Volatility # Copyright (c) 2008-2013 Volatility Foundation # # This file is part of Volatility. # # Volatility is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Volatility is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Volatility. If not, see <http://www.gnu.org/licenses/>. # """ @author: MHL @license: GNU General Public License 2.0 @contact: [email protected] This file provides support for Vista SP1 and SP2 x64 """ syscalls = [ [ 'NtMapUserPhysicalPagesScatter', # 0x0 'NtWaitForSingleObject', # 0x1 'NtCallbackReturn', # 0x2 'NtReadFile', # 0x3 'NtDeviceIoControlFile', # 0x4 'NtWriteFile', # 0x5 'NtRemoveIoCompletion', # 0x6 'NtReleaseSemaphore', # 0x7 'NtReplyWaitReceivePort', # 0x8 'NtReplyPort', # 0x9 'NtSetInformationThread', # 0xa 'NtSetEvent', # 0xb 'NtClose', # 0xc 'NtQueryObject', # 0xd 'NtQueryInformationFile', # 0xe 'NtOpenKey', # 0xf 'NtEnumerateValueKey', # 0x10 'NtFindAtom', # 0x11 'NtQueryDefaultLocale', # 0x12 'NtQueryKey', # 0x13 'NtQueryValueKey', # 0x14 'NtAllocateVirtualMemory', # 0x15 'NtQueryInformationProcess', # 0x16 'NtWaitForMultipleObjects32', # 0x17 'NtWriteFileGather', # 0x18 'NtSetInformationProcess', # 0x19 'NtCreateKey', # 0x1a 'NtFreeVirtualMemory', # 0x1b 'NtImpersonateClientOfPort', # 0x1c 'NtReleaseMutant', # 0x1d 'NtQueryInformationToken', # 0x1e 'NtRequestWaitReplyPort', # 0x1f 'NtQueryVirtualMemory', # 0x20 'NtOpenThreadToken', # 0x21 'NtQueryInformationThread', # 0x22 'NtOpenProcess', # 0x23 'NtSetInformationFile', # 0x24 'NtMapViewOfSection', # 0x25 'NtAccessCheckAndAuditAlarm', # 0x26 'NtUnmapViewOfSection', # 0x27 'NtReplyWaitReceivePortEx', # 0x28 'NtTerminateProcess', # 0x29 'NtSetEventBoostPriority', # 0x2a 'NtReadFileScatter', # 0x2b 'NtOpenThreadTokenEx', # 0x2c 'NtOpenProcessTokenEx', # 0x2d 'NtQueryPerformanceCounter', # 0x2e 'NtEnumerateKey', # 0x2f 'NtOpenFile', # 0x30 'NtDelayExecution', # 0x31 'NtQueryDirectoryFile', # 0x32 'NtQuerySystemInformation', # 0x33 'NtOpenSection', # 0x34 'NtQueryTimer', # 0x35 'NtFsControlFile', # 0x36 'NtWriteVirtualMemory', # 0x37 'NtCloseObjectAuditAlarm', # 0x38 'NtDuplicateObject', # 0x39 'NtQueryAttributesFile', # 0x3a 'NtClearEvent', # 0x3b 'NtReadVirtualMemory', # 0x3c 'NtOpenEvent', # 0x3d 'NtAdjustPrivilegesToken', # 0x3e 'NtDuplicateToken', # 0x3f 'NtContinue', # 0x40 'NtQueryDefaultUILanguage', # 0x41 'NtQueueApcThread', # 0x42 'NtYieldExecution', # 0x43 'NtAddAtom', # 0x44 'NtCreateEvent', # 0x45 'NtQueryVolumeInformationFile', # 0x46 'NtCreateSection', # 0x47 'NtFlushBuffersFile', # 0x48 'NtApphelpCacheControl', # 0x49 'NtCreateProcessEx', # 0x4a 'NtCreateThread', # 0x4b 'NtIsProcessInJob', # 0x4c 'NtProtectVirtualMemory', # 0x4d 'NtQuerySection', # 0x4e 'NtResumeThread', # 0x4f 'NtTerminateThread', # 0x50 'NtReadRequestData', # 0x51 'NtCreateFile', # 0x52 'NtQueryEvent', # 0x53 'NtWriteRequestData', # 0x54 'NtOpenDirectoryObject', # 0x55 'NtAccessCheckByTypeAndAuditAlarm', # 0x56 'NtQuerySystemTime', # 0x57 'NtWaitForMultipleObjects', # 0x58 'NtSetInformationObject', # 0x59 'NtCancelIoFile', # 0x5a 'NtTraceEvent', # 0x5b 'NtPowerInformation', # 0x5c 
'NtSetValueKey', # 0x5d 'NtCancelTimer', # 0x5e 'NtSetTimer', # 0x5f 'NtAcceptConnectPort', # 0x60 'NtAccessCheck', # 0x61 'NtAccessCheckByType', # 0x62 'NtAccessCheckByTypeResultList', # 0x63 'NtAccessCheckByTypeResultListAndAuditAlarm', # 0x64 'NtAccessCheckByTypeResultListAndAuditAlarmByHandle', # 0x65 'NtAcquireCMFViewOwnership', # 0x66 'NtAddBootEntry', # 0x67 'NtAddDriverEntry', # 0x68 'NtAdjustGroupsToken', # 0x69 'NtAlertResumeThread', # 0x6a 'NtAlertThread', # 0x6b 'NtAllocateLocallyUniqueId', # 0x6c 'NtAllocateUserPhysicalPages', # 0x6d 'NtAllocateUuids', # 0x6e 'NtAlpcAcceptConnectPort', # 0x6f 'NtAlpcCancelMessage', # 0x70 'NtAlpcConnectPort', # 0x71 'NtAlpcCreatePort', # 0x72 'NtAlpcCreatePortSection', # 0x73 'NtAlpcCreateResourceReserve', # 0x74 'NtAlpcCreateSectionView', # 0x75 'NtAlpcCreateSecurityContext', # 0x76 'NtAlpcDeletePortSection', # 0x77 'NtAlpcDeleteResourceReserve', # 0x78 'NtAlpcDeleteSectionView', # 0x79 'NtAlpcDeleteSecurityContext', # 0x7a 'NtAlpcDisconnectPort', # 0x7b 'NtAlpcImpersonateClientOfPort', # 0x7c 'NtAlpcOpenSenderProcess', # 0x7d 'NtAlpcOpenSenderThread', # 0x7e 'NtAlpcQueryInformation', # 0x7f 'NtAlpcQueryInformationMessage', # 0x80 'NtAlpcRevokeSecurityContext', # 0x81 'NtAlpcSendWaitReceivePort', # 0x82 'NtAlpcSetInformation', # 0x83 'NtAreMappedFilesTheSame', # 0x84 'NtAssignProcessToJobObject', # 0x85 'NtCancelDeviceWakeupRequest', # 0x86 'NtCancelIoFileEx', # 0x87 'NtCancelSynchronousIoFile', # 0x88 'NtCommitComplete', # 0x89 'NtCommitEnlistment', # 0x8a 'NtCommitTransaction', # 0x8b 'NtCompactKeys', # 0x8c 'NtCompareTokens', # 0x8d 'NtCompleteConnectPort', # 0x8e 'NtCompressKey', # 0x8f 'NtConnectPort', # 0x90 'NtCreateDebugObject', # 0x91 'NtCreateDirectoryObject', # 0x92 'NtCreateEnlistment', # 0x93 'NtCreateEventPair', # 0x94 'NtCreateIoCompletion', # 0x95 'NtCreateJobObject', # 0x96 'NtCreateJobSet', # 0x97 'NtCreateKeyTransacted', # 0x98 'NtCreateKeyedEvent', # 0x99 'NtCreateMailslotFile', # 0x9a 'NtCreateMutant', # 0x9b 'NtCreateNamedPipeFile', # 0x9c 'NtCreatePagingFile', # 0x9d 'NtCreatePort', # 0x9e 'NtCreatePrivateNamespace', # 0x9f 'NtCreateProcess', # 0xa0 'NtCreateProfile', # 0xa1 'NtCreateResourceManager', # 0xa2 'NtCreateSemaphore', # 0xa3 'NtCreateSymbolicLinkObject', # 0xa4 'NtCreateThreadEx', # 0xa5 'NtCreateTimer', # 0xa6 'NtCreateToken', # 0xa7 'NtCreateTransaction', # 0xa8 'NtCreateTransactionManager', # 0xa9 'NtCreateUserProcess', # 0xaa 'NtCreateWaitablePort', # 0xab 'NtCreateWorkerFactory', # 0xac 'NtDebugActiveProcess', # 0xad 'NtDebugContinue', # 0xae 'NtDeleteAtom', # 0xaf 'NtDeleteBootEntry', # 0xb0 'NtDeleteDriverEntry', # 0xb1 'NtDeleteFile', # 0xb2 'NtDeleteKey', # 0xb3 'NtDeleteObjectAuditAlarm', # 0xb4 'NtDeletePrivateNamespace', # 0xb5 'NtDeleteValueKey', # 0xb6 'NtDisplayString', # 0xb7 'NtEnumerateBootEntries', # 0xb8 'NtEnumerateDriverEntries', # 0xb9 'NtEnumerateSystemEnvironmentValuesEx', # 0xba 'NtEnumerateTransactionObject', # 0xbb 'NtExtendSection', # 0xbc 'NtFilterToken', # 0xbd 'NtFlushInstallUILanguage', # 0xbe 'NtFlushInstructionCache', # 0xbf 'NtFlushKey', # 0xc0 'NtFlushProcessWriteBuffers', # 0xc1 'NtFlushVirtualMemory', # 0xc2 'NtFlushWriteBuffer', # 0xc3 'NtFreeUserPhysicalPages', # 0xc4 'NtFreezeRegistry', # 0xc5 'NtFreezeTransactions', # 0xc6 'NtGetContextThread', # 0xc7 'NtGetCurrentProcessorNumber', # 0xc8 'NtGetDevicePowerState', # 0xc9 'NtGetMUIRegistryInfo', # 0xca 'NtGetNextProcess', # 0xcb 'NtGetNextThread', # 0xcc 'NtGetNlsSectionPtr', # 0xcd 'NtGetNotificationResourceManager', 
# 0xce 'NtGetPlugPlayEvent', # 0xcf 'NtGetWriteWatch', # 0xd0 'NtImpersonateAnonymousToken', # 0xd1 'NtImpersonateThread', # 0xd2 'NtInitializeNlsFiles', # 0xd3 'NtInitializeRegistry', # 0xd4 'NtInitiatePowerAction', # 0xd5 'NtIsSystemResumeAutomatic', # 0xd6 'NtIsUILanguageComitted', # 0xd7 'NtListenPort', # 0xd8 'NtLoadDriver', # 0xd9 'NtLoadKey', # 0xda 'NtLoadKey2', # 0xdb 'NtLoadKeyEx', # 0xdc 'NtLockFile', # 0xdd 'NtLockProductActivationKeys', # 0xde 'NtLockRegistryKey', # 0xdf 'NtLockVirtualMemory', # 0xe0 'NtMakePermanentObject', # 0xe1 'NtMakeTemporaryObject', # 0xe2 'NtMapCMFModule', # 0xe3 'NtMapUserPhysicalPages', # 0xe4 'NtModifyBootEntry', # 0xe5 'NtModifyDriverEntry', # 0xe6 'NtNotifyChangeDirectoryFile', # 0xe7 'NtNotifyChangeKey', # 0xe8 'NtNotifyChangeMultipleKeys', # 0xe9 'NtOpenEnlistment', # 0xea 'NtOpenEventPair', # 0xeb 'NtOpenIoCompletion', # 0xec 'NtOpenJobObject', # 0xed 'NtOpenKeyTransacted', # 0xee 'NtOpenKeyedEvent', # 0xef 'NtOpenMutant', # 0xf0 'NtOpenObjectAuditAlarm', # 0xf1 'NtOpenPrivateNamespace', # 0xf2 'NtOpenProcessToken', # 0xf3 'NtOpenResourceManager', # 0xf4 'NtOpenSemaphore', # 0xf5 'NtOpenSession', # 0xf6 'NtOpenSymbolicLinkObject', # 0xf7 'NtOpenThread', # 0xf8 'NtOpenTimer', # 0xf9 'NtOpenTransaction', # 0xfa 'NtOpenTransactionManager', # 0xfb 'NtPlugPlayControl', # 0xfc 'NtPrePrepareComplete', # 0xfd 'NtPrePrepareEnlistment', # 0xfe 'NtPrepareComplete', # 0xff 'NtPrepareEnlistment', # 0x100 'NtPrivilegeCheck', # 0x101 'NtPrivilegeObjectAuditAlarm', # 0x102 'NtPrivilegedServiceAuditAlarm', # 0x103 'NtPropagationComplete', # 0x104 'NtPropagationFailed', # 0x105 'NtPulseEvent', # 0x106 'NtQueryBootEntryOrder', # 0x107 'NtQueryBootOptions', # 0x108 'NtQueryDebugFilterState', # 0x109 'NtQueryDirectoryObject', # 0x10a 'NtQueryDriverEntryOrder', # 0x10b 'NtQueryEaFile', # 0x10c 'NtQueryFullAttributesFile', # 0x10d 'NtQueryInformationAtom', # 0x10e 'NtQueryInformationEnlistment', # 0x10f 'NtQueryInformationJobObject', # 0x110 'NtQueryInformationPort', # 0x111 'NtQueryInformationResourceManager', # 0x112 'NtQueryInformationTransaction', # 0x113 'NtQueryInformationTransactionManager', # 0x114 'NtQueryInformationWorkerFactory', # 0x115 'NtQueryInstallUILanguage', # 0x116 'NtQueryIntervalProfile', # 0x117 'NtQueryIoCompletion', # 0x118 'NtQueryLicenseValue', # 0x119 'NtQueryMultipleValueKey', # 0x11a 'NtQueryMutant', # 0x11b 'NtQueryOpenSubKeys', # 0x11c 'NtQueryOpenSubKeysEx', # 0x11d 'NtQueryPortInformationProcess', # 0x11e 'NtQueryQuotaInformationFile', # 0x11f 'NtQuerySecurityObject', # 0x120 'NtQuerySemaphore', # 0x121 'NtQuerySymbolicLinkObject', # 0x122 'NtQuerySystemEnvironmentValue', # 0x123 'NtQuerySystemEnvironmentValueEx', # 0x124 'NtQueryTimerResolution', # 0x125 'NtRaiseException', # 0x126 'NtRaiseHardError', # 0x127 'NtReadOnlyEnlistment', # 0x128 'NtRecoverEnlistment', # 0x129 'NtRecoverResourceManager', # 0x12a 'NtRecoverTransactionManager', # 0x12b 'NtRegisterProtocolAddressInformation', # 0x12c 'NtRegisterThreadTerminatePort', # 0x12d 'NtReleaseCMFViewOwnership', # 0x12e 'NtReleaseKeyedEvent', # 0x12f 'NtReleaseWorkerFactoryWorker', # 0x130 'NtRemoveIoCompletionEx', # 0x131 'NtRemoveProcessDebug', # 0x132 'NtRenameKey', # 0x133 'NtRenameTransactionManager', # 0x134 'NtReplaceKey', # 0x135 'NtReplacePartitionUnit', # 0x136 'NtReplyWaitReplyPort', # 0x137 'NtRequestDeviceWakeup', # 0x138 'NtRequestPort', # 0x139 'NtRequestWakeupLatency', # 0x13a 'NtResetEvent', # 0x13b 'NtResetWriteWatch', # 0x13c 'NtRestoreKey', # 0x13d 
'NtResumeProcess', # 0x13e 'NtRollbackComplete', # 0x13f 'NtRollbackEnlistment', # 0x140 'NtRollbackTransaction', # 0x141 'NtRollforwardTransactionManager', # 0x142 'NtSaveKey', # 0x143 'NtSaveKeyEx', # 0x144 'NtSaveMergedKeys', # 0x145 'NtSecureConnectPort', # 0x146 'NtSetBootEntryOrder', # 0x147 'NtSetBootOptions', # 0x148 'NtSetContextThread', # 0x149 'NtSetDebugFilterState', # 0x14a 'NtSetDefaultHardErrorPort', # 0x14b 'NtSetDefaultLocale', # 0x14c 'NtSetDefaultUILanguage', # 0x14d 'NtSetDriverEntryOrder', # 0x14e 'NtSetEaFile', # 0x14f 'NtSetHighEventPair', # 0x150 'NtSetHighWaitLowEventPair', # 0x151 'NtSetInformationDebugObject', # 0x152 'NtSetInformationEnlistment', # 0x153 'NtSetInformationJobObject', # 0x154 'NtSetInformationKey', # 0x155 'NtSetInformationResourceManager', # 0x156 'NtSetInformationToken', # 0x157 'NtSetInformationTransaction', # 0x158 'NtSetInformationTransactionManager', # 0x159 'NtSetInformationWorkerFactory', # 0x15a 'NtSetIntervalProfile', # 0x15b 'NtSetIoCompletion', # 0x15c 'NtSetLdtEntries', # 0x15d 'NtSetLowEventPair', # 0x15e 'NtSetLowWaitHighEventPair', # 0x15f 'NtSetQuotaInformationFile', # 0x160 'NtSetSecurityObject', # 0x161 'NtSetSystemEnvironmentValue', # 0x162 'NtSetSystemEnvironmentValueEx', # 0x163 'NtSetSystemInformation', # 0x164 'NtSetSystemPowerState', # 0x165 'NtSetSystemTime', # 0x166 'NtSetThreadExecutionState', # 0x167 'NtSetTimerResolution', # 0x168 'NtSetUuidSeed', # 0x169 'NtSetVolumeInformationFile', # 0x16a 'NtShutdownSystem', # 0x16b 'NtShutdownWorkerFactory', # 0x16c 'NtSignalAndWaitForSingleObject', # 0x16d 'NtSinglePhaseReject', # 0x16e 'NtStartProfile', # 0x16f 'NtStopProfile', # 0x170 'NtSuspendProcess', # 0x171 'NtSuspendThread', # 0x172 'NtSystemDebugControl', # 0x173 'NtTerminateJobObject', # 0x174 'NtTestAlert', # 0x175 'NtThawRegistry', # 0x176 'NtThawTransactions', # 0x177 'NtTraceControl', # 0x178 'NtTranslateFilePath', # 0x179 'NtUnloadDriver', # 0x17a 'NtUnloadKey', # 0x17b 'NtUnloadKey2', # 0x17c 'NtUnloadKeyEx', # 0x17d 'NtUnlockFile', # 0x17e 'NtUnlockVirtualMemory', # 0x17f 'NtVdmControl', # 0x180 'NtWaitForDebugEvent', # 0x181 'NtWaitForKeyedEvent', # 0x182 'NtWaitForWorkViaWorkerFactory', # 0x183 'NtWaitHighEventPair', # 0x184 'NtWaitLowEventPair', # 0x185 'NtWorkerFactoryWorkerReady', # 0x186 ], [ 'NtUserGetThreadState', # 0x0 'NtUserPeekMessage', # 0x1 'NtUserCallOneParam', # 0x2 'NtUserGetKeyState', # 0x3 'NtUserInvalidateRect', # 0x4 'NtUserCallNoParam', # 0x5 'NtUserGetMessage', # 0x6 'NtUserMessageCall', # 0x7 'NtGdiBitBlt', # 0x8 'NtGdiGetCharSet', # 0x9 'NtUserGetDC', # 0xa 'NtGdiSelectBitmap', # 0xb 'NtUserWaitMessage', # 0xc 'NtUserTranslateMessage', # 0xd 'NtUserGetProp', # 0xe 'NtUserPostMessage', # 0xf 'NtUserQueryWindow', # 0x10 'NtUserTranslateAccelerator', # 0x11 'NtGdiFlush', # 0x12 'NtUserRedrawWindow', # 0x13 'NtUserWindowFromPoint', # 0x14 'NtUserCallMsgFilter', # 0x15 'NtUserValidateTimerCallback', # 0x16 'NtUserBeginPaint', # 0x17 'NtUserSetTimer', # 0x18 'NtUserEndPaint', # 0x19 'NtUserSetCursor', # 0x1a 'NtUserKillTimer', # 0x1b 'NtUserBuildHwndList', # 0x1c 'NtUserSelectPalette', # 0x1d 'NtUserCallNextHookEx', # 0x1e 'NtUserHideCaret', # 0x1f 'NtGdiIntersectClipRect', # 0x20 'NtUserCallHwndLock', # 0x21 'NtUserGetProcessWindowStation', # 0x22 'NtGdiDeleteObjectApp', # 0x23 'NtUserSetWindowPos', # 0x24 'NtUserShowCaret', # 0x25 'NtUserEndDeferWindowPosEx', # 0x26 'NtUserCallHwndParamLock', # 0x27 'NtUserVkKeyScanEx', # 0x28 'NtGdiSetDIBitsToDeviceInternal', # 0x29 'NtUserCallTwoParam', # 
0x2a 'NtGdiGetRandomRgn', # 0x2b 'NtUserCopyAcceleratorTable', # 0x2c 'NtUserNotifyWinEvent', # 0x2d 'NtGdiExtSelectClipRgn', # 0x2e 'NtUserIsClipboardFormatAvailable', # 0x2f 'NtUserSetScrollInfo', # 0x30 'NtGdiStretchBlt', # 0x31 'NtUserCreateCaret', # 0x32 'NtGdiRectVisible', # 0x33 'NtGdiCombineRgn', # 0x34 'NtGdiGetDCObject', # 0x35 'NtUserDispatchMessage', # 0x36 'NtUserRegisterWindowMessage', # 0x37 'NtGdiExtTextOutW', # 0x38 'NtGdiSelectFont', # 0x39 'NtGdiRestoreDC', # 0x3a 'NtGdiSaveDC', # 0x3b 'NtUserGetForegroundWindow', # 0x3c 'NtUserShowScrollBar', # 0x3d 'NtUserFindExistingCursorIcon', # 0x3e 'NtGdiGetDCDword', # 0x3f 'NtGdiGetRegionData', # 0x40 'NtGdiLineTo', # 0x41 'NtUserSystemParametersInfo', # 0x42 'NtGdiGetAppClipBox', # 0x43 'NtUserGetAsyncKeyState', # 0x44 'NtUserGetCPD', # 0x45 'NtUserRemoveProp', # 0x46 'NtGdiDoPalette', # 0x47 'NtGdiPolyPolyDraw', # 0x48 'NtUserSetCapture', # 0x49 'NtUserEnumDisplayMonitors', # 0x4a 'NtGdiCreateCompatibleBitmap', # 0x4b 'NtUserSetProp', # 0x4c 'NtGdiGetTextCharsetInfo', # 0x4d 'NtUserSBGetParms', # 0x4e 'NtUserGetIconInfo', # 0x4f 'NtUserExcludeUpdateRgn', # 0x50 'NtUserSetFocus', # 0x51 'NtGdiExtGetObjectW', # 0x52 'NtUserDeferWindowPos', # 0x53 'NtUserGetUpdateRect', # 0x54 'NtGdiCreateCompatibleDC', # 0x55 'NtUserGetClipboardSequenceNumber', # 0x56 'NtGdiCreatePen', # 0x57 'NtUserShowWindow', # 0x58 'NtUserGetKeyboardLayoutList', # 0x59 'NtGdiPatBlt', # 0x5a 'NtUserMapVirtualKeyEx', # 0x5b 'NtUserSetWindowLong', # 0x5c 'NtGdiHfontCreate', # 0x5d 'NtUserMoveWindow', # 0x5e 'NtUserPostThreadMessage', # 0x5f 'NtUserDrawIconEx', # 0x60 'NtUserGetSystemMenu', # 0x61 'NtGdiDrawStream', # 0x62 'NtUserInternalGetWindowText', # 0x63 'NtUserGetWindowDC', # 0x64 'NtGdiD3dDrawPrimitives2', # 0x65 'NtGdiInvertRgn', # 0x66 'NtGdiGetRgnBox', # 0x67 'NtGdiGetAndSetDCDword', # 0x68 'NtGdiMaskBlt', # 0x69 'NtGdiGetWidthTable', # 0x6a 'NtUserScrollDC', # 0x6b 'NtUserGetObjectInformation', # 0x6c 'NtGdiCreateBitmap', # 0x6d 'NtGdiConsoleTextOut', # 0x6e 'NtUserFindWindowEx', # 0x6f 'NtGdiPolyPatBlt', # 0x70 'NtUserUnhookWindowsHookEx', # 0x71 'NtGdiGetNearestColor', # 0x72 'NtGdiTransformPoints', # 0x73 'NtGdiGetDCPoint', # 0x74 'NtUserCheckImeHotKey', # 0x75 'NtGdiCreateDIBBrush', # 0x76 'NtGdiGetTextMetricsW', # 0x77 'NtUserCreateWindowEx', # 0x78 'NtUserSetParent', # 0x79 'NtUserGetKeyboardState', # 0x7a 'NtUserToUnicodeEx', # 0x7b 'NtUserGetControlBrush', # 0x7c 'NtUserGetClassName', # 0x7d 'NtGdiAlphaBlend', # 0x7e 'NtGdiDdBlt', # 0x7f 'NtGdiOffsetRgn', # 0x80 'NtUserDefSetText', # 0x81 'NtGdiGetTextFaceW', # 0x82 'NtGdiStretchDIBitsInternal', # 0x83 'NtUserSendInput', # 0x84 'NtUserGetThreadDesktop', # 0x85 'NtGdiCreateRectRgn', # 0x86 'NtGdiGetDIBitsInternal', # 0x87 'NtUserGetUpdateRgn', # 0x88 'NtGdiDeleteClientObj', # 0x89 'NtUserGetIconSize', # 0x8a 'NtUserFillWindow', # 0x8b 'NtGdiExtCreateRegion', # 0x8c 'NtGdiComputeXformCoefficients', # 0x8d 'NtUserSetWindowsHookEx', # 0x8e 'NtUserNotifyProcessCreate', # 0x8f 'NtGdiUnrealizeObject', # 0x90 'NtUserGetTitleBarInfo', # 0x91 'NtGdiRectangle', # 0x92 'NtUserSetThreadDesktop', # 0x93 'NtUserGetDCEx', # 0x94 'NtUserGetScrollBarInfo', # 0x95 'NtGdiGetTextExtent', # 0x96 'NtUserSetWindowFNID', # 0x97 'NtGdiSetLayout', # 0x98 'NtUserCalcMenuBar', # 0x99 'NtUserThunkedMenuItemInfo', # 0x9a 'NtGdiExcludeClipRect', # 0x9b 'NtGdiCreateDIBSection', # 0x9c 'NtGdiGetDCforBitmap', # 0x9d 'NtUserDestroyCursor', # 0x9e 'NtUserDestroyWindow', # 0x9f 'NtUserCallHwndParam', # 0xa0 
'NtGdiCreateDIBitmapInternal', # 0xa1 'NtUserOpenWindowStation', # 0xa2 'NtGdiDdDeleteSurfaceObject', # 0xa3 'NtGdiEnumFontClose', # 0xa4 'NtGdiEnumFontOpen', # 0xa5 'NtGdiEnumFontChunk', # 0xa6 'NtGdiDdCanCreateSurface', # 0xa7 'NtGdiDdCreateSurface', # 0xa8 'NtUserSetCursorIconData', # 0xa9 'NtGdiDdDestroySurface', # 0xaa 'NtUserCloseDesktop', # 0xab 'NtUserOpenDesktop', # 0xac 'NtUserSetProcessWindowStation', # 0xad 'NtUserGetAtomName', # 0xae 'NtGdiDdResetVisrgn', # 0xaf 'NtGdiExtCreatePen', # 0xb0 'NtGdiCreatePaletteInternal', # 0xb1 'NtGdiSetBrushOrg', # 0xb2 'NtUserBuildNameList', # 0xb3 'NtGdiSetPixel', # 0xb4 'NtUserRegisterClassExWOW', # 0xb5 'NtGdiCreatePatternBrushInternal', # 0xb6 'NtUserGetAncestor', # 0xb7 'NtGdiGetOutlineTextMetricsInternalW', # 0xb8 'NtGdiSetBitmapBits', # 0xb9 'NtUserCloseWindowStation', # 0xba 'NtUserGetDoubleClickTime', # 0xbb 'NtUserEnableScrollBar', # 0xbc 'NtGdiCreateSolidBrush', # 0xbd 'NtUserGetClassInfoEx', # 0xbe 'NtGdiCreateClientObj', # 0xbf 'NtUserUnregisterClass', # 0xc0 'NtUserDeleteMenu', # 0xc1 'NtGdiRectInRegion', # 0xc2 'NtUserScrollWindowEx', # 0xc3 'NtGdiGetPixel', # 0xc4 'NtUserSetClassLong', # 0xc5 'NtUserGetMenuBarInfo', # 0xc6 'NtGdiDdCreateSurfaceEx', # 0xc7 'NtGdiDdCreateSurfaceObject', # 0xc8 'NtGdiGetNearestPaletteIndex', # 0xc9 'NtGdiDdLockD3D', # 0xca 'NtGdiDdUnlockD3D', # 0xcb 'NtGdiGetCharWidthW', # 0xcc 'NtUserInvalidateRgn', # 0xcd 'NtUserGetClipboardOwner', # 0xce 'NtUserSetWindowRgn', # 0xcf 'NtUserBitBltSysBmp', # 0xd0 'NtGdiGetCharWidthInfo', # 0xd1 'NtUserValidateRect', # 0xd2 'NtUserCloseClipboard', # 0xd3 'NtUserOpenClipboard', # 0xd4 'NtGdiGetStockObject', # 0xd5 'NtUserSetClipboardData', # 0xd6 'NtUserEnableMenuItem', # 0xd7 'NtUserAlterWindowStyle', # 0xd8 'NtGdiFillRgn', # 0xd9 'NtUserGetWindowPlacement', # 0xda 'NtGdiModifyWorldTransform', # 0xdb 'NtGdiGetFontData', # 0xdc 'NtUserGetOpenClipboardWindow', # 0xdd 'NtUserSetThreadState', # 0xde 'NtGdiOpenDCW', # 0xdf 'NtUserTrackMouseEvent', # 0xe0 'NtGdiGetTransform', # 0xe1 'NtUserDestroyMenu', # 0xe2 'NtGdiGetBitmapBits', # 0xe3 'NtUserConsoleControl', # 0xe4 'NtUserSetActiveWindow', # 0xe5 'NtUserSetInformationThread', # 0xe6 'NtUserSetWindowPlacement', # 0xe7 'NtUserGetControlColor', # 0xe8 'NtGdiSetMetaRgn', # 0xe9 'NtGdiSetMiterLimit', # 0xea 'NtGdiSetVirtualResolution', # 0xeb 'NtGdiGetRasterizerCaps', # 0xec 'NtUserSetWindowWord', # 0xed 'NtUserGetClipboardFormatName', # 0xee 'NtUserRealInternalGetMessage', # 0xef 'NtUserCreateLocalMemHandle', # 0xf0 'NtUserAttachThreadInput', # 0xf1 'NtGdiCreateHalftonePalette', # 0xf2 'NtUserPaintMenuBar', # 0xf3 'NtUserSetKeyboardState', # 0xf4 'NtGdiCombineTransform', # 0xf5 'NtUserCreateAcceleratorTable', # 0xf6 'NtUserGetCursorFrameInfo', # 0xf7 'NtUserGetAltTabInfo', # 0xf8 'NtUserGetCaretBlinkTime', # 0xf9 'NtGdiQueryFontAssocInfo', # 0xfa 'NtUserProcessConnect', # 0xfb 'NtUserEnumDisplayDevices', # 0xfc 'NtUserEmptyClipboard', # 0xfd 'NtUserGetClipboardData', # 0xfe 'NtUserRemoveMenu', # 0xff 'NtGdiSetBoundsRect', # 0x100 'NtUserSetInformationProcess', # 0x101 'NtGdiGetBitmapDimension', # 0x102 'NtUserConvertMemHandle', # 0x103 'NtUserDestroyAcceleratorTable', # 0x104 'NtUserGetGUIThreadInfo', # 0x105 'NtGdiCloseFigure', # 0x106 'NtUserSetWindowsHookAW', # 0x107 'NtUserSetMenuDefaultItem', # 0x108 'NtUserCheckMenuItem', # 0x109 'NtUserSetWinEventHook', # 0x10a 'NtUserUnhookWinEvent', # 0x10b 'NtGdiSetupPublicCFONT', # 0x10c 'NtUserLockWindowUpdate', # 0x10d 'NtUserSetSystemMenu', # 0x10e 'NtUserThunkedMenuInfo', 
# 0x10f 'NtGdiBeginPath', # 0x110 'NtGdiEndPath', # 0x111 'NtGdiFillPath', # 0x112 'NtUserCallHwnd', # 0x113 'NtUserDdeInitialize', # 0x114 'NtUserModifyUserStartupInfoFlags', # 0x115 'NtUserCountClipboardFormats', # 0x116 'NtGdiAddFontMemResourceEx', # 0x117 'NtGdiEqualRgn', # 0x118 'NtGdiGetSystemPaletteUse', # 0x119 'NtGdiRemoveFontMemResourceEx', # 0x11a 'NtUserEnumDisplaySettings', # 0x11b 'NtUserPaintDesktop', # 0x11c 'NtGdiExtEscape', # 0x11d 'NtGdiSetBitmapDimension', # 0x11e 'NtGdiSetFontEnumeration', # 0x11f 'NtUserChangeClipboardChain', # 0x120 'NtUserResolveDesktop', # 0x121 'NtUserSetClipboardViewer', # 0x122 'NtUserShowWindowAsync', # 0x123 'NtUserSetConsoleReserveKeys', # 0x124 'NtGdiCreateColorSpace', # 0x125 'NtGdiDeleteColorSpace', # 0x126 'NtUserActivateKeyboardLayout', # 0x127 'NtGdiAbortDoc', # 0x128 'NtGdiAbortPath', # 0x129 'NtGdiAddEmbFontToDC', # 0x12a 'NtGdiAddFontResourceW', # 0x12b 'NtGdiAddRemoteFontToDC', # 0x12c 'NtGdiAddRemoteMMInstanceToDC', # 0x12d 'NtGdiAngleArc', # 0x12e 'NtGdiAnyLinkedFonts', # 0x12f 'NtGdiArcInternal', # 0x130 'NtGdiBRUSHOBJ_DeleteRbrush', # 0x131 'NtGdiBRUSHOBJ_hGetColorTransform', # 0x132 'NtGdiBRUSHOBJ_pvAllocRbrush', # 0x133 'NtGdiBRUSHOBJ_pvGetRbrush', # 0x134 'NtGdiBRUSHOBJ_ulGetBrushColor', # 0x135 'NtGdiCLIPOBJ_bEnum', # 0x136 'NtGdiCLIPOBJ_cEnumStart', # 0x137 'NtGdiCLIPOBJ_ppoGetPath', # 0x138 'NtGdiCancelDC', # 0x139 'NtGdiChangeGhostFont', # 0x13a 'NtGdiCheckBitmapBits', # 0x13b 'NtGdiClearBitmapAttributes', # 0x13c 'NtGdiClearBrushAttributes', # 0x13d 'NtGdiColorCorrectPalette', # 0x13e 'NtGdiConfigureOPMProtectedOutput', # 0x13f 'NtGdiConvertMetafileRect', # 0x140 'NtGdiCreateColorTransform', # 0x141 'NtGdiCreateEllipticRgn', # 0x142 'NtGdiCreateHatchBrushInternal', # 0x143 'NtGdiCreateMetafileDC', # 0x144 'NtGdiCreateOPMProtectedOutputs', # 0x145 'NtGdiCreateRoundRectRgn', # 0x146 'NtGdiCreateServerMetaFile', # 0x147 'NtGdiD3dContextCreate', # 0x148 'NtGdiD3dContextDestroy', # 0x149 'NtGdiD3dContextDestroyAll', # 0x14a 'NtGdiD3dValidateTextureStageState', # 0x14b 'NtGdiDDCCIGetCapabilitiesString', # 0x14c 'NtGdiDDCCIGetCapabilitiesStringLength', # 0x14d 'NtGdiDDCCIGetTimingReport', # 0x14e 'NtGdiDDCCIGetVCPFeature', # 0x14f 'NtGdiDDCCISaveCurrentSettings', # 0x150 'NtGdiDDCCISetVCPFeature', # 0x151 'NtGdiDdAddAttachedSurface', # 0x152 'NtGdiDdAlphaBlt', # 0x153 'NtGdiDdAttachSurface', # 0x154 'NtGdiDdBeginMoCompFrame', # 0x155 'NtGdiDdCanCreateD3DBuffer', # 0x156 'NtGdiDdColorControl', # 0x157 'NtGdiDdCreateD3DBuffer', # 0x158 'NtGdiDdCreateDirectDrawObject', # 0x159 'NtGdiDdCreateMoComp', # 0x15a 'NtGdiDdDDICheckExclusiveOwnership', # 0x15b 'NtGdiDdDDICheckMonitorPowerState', # 0x15c 'NtGdiDdDDICheckOcclusion', # 0x15d 'NtGdiDdDDICloseAdapter', # 0x15e 'NtGdiDdDDICreateAllocation', # 0x15f 'NtGdiDdDDICreateContext', # 0x160 'NtGdiDdDDICreateDCFromMemory', # 0x161 'NtGdiDdDDICreateDevice', # 0x162 'NtGdiDdDDICreateOverlay', # 0x163 'NtGdiDdDDICreateSynchronizationObject', # 0x164 'NtGdiDdDDIDestroyAllocation', # 0x165 'NtGdiDdDDIDestroyContext', # 0x166 'NtGdiDdDDIDestroyDCFromMemory', # 0x167 'NtGdiDdDDIDestroyDevice', # 0x168 'NtGdiDdDDIDestroyOverlay', # 0x169 'NtGdiDdDDIDestroySynchronizationObject', # 0x16a 'NtGdiDdDDIEscape', # 0x16b 'NtGdiDdDDIFlipOverlay', # 0x16c 'NtGdiDdDDIGetContextSchedulingPriority', # 0x16d 'NtGdiDdDDIGetDeviceState', # 0x16e 'NtGdiDdDDIGetDisplayModeList', # 0x16f 'NtGdiDdDDIGetMultisampleMethodList', # 0x170 'NtGdiDdDDIGetPresentHistory', # 0x171 
'NtGdiDdDDIGetProcessSchedulingPriorityClass', # 0x172 'NtGdiDdDDIGetRuntimeData', # 0x173 'NtGdiDdDDIGetScanLine', # 0x174 'NtGdiDdDDIGetSharedPrimaryHandle', # 0x175 'NtGdiDdDDIInvalidateActiveVidPn', # 0x176 'NtGdiDdDDILock', # 0x177 'NtGdiDdDDIOpenAdapterFromDeviceName', # 0x178 'NtGdiDdDDIOpenAdapterFromHdc', # 0x179 'NtGdiDdDDIOpenResource', # 0x17a 'NtGdiDdDDIPollDisplayChildren', # 0x17b 'NtGdiDdDDIPresent', # 0x17c 'NtGdiDdDDIQueryAdapterInfo', # 0x17d 'NtGdiDdDDIQueryAllocationResidency', # 0x17e 'NtGdiDdDDIQueryResourceInfo', # 0x17f 'NtGdiDdDDIQueryStatistics', # 0x180 'NtGdiDdDDIReleaseProcessVidPnSourceOwners', # 0x181 'NtGdiDdDDIRender', # 0x182 'NtGdiDdDDISetAllocationPriority', # 0x183 'NtGdiDdDDISetContextSchedulingPriority', # 0x184 'NtGdiDdDDISetDisplayMode', # 0x185 'NtGdiDdDDISetDisplayPrivateDriverFormat', # 0x186 'NtGdiDdDDISetGammaRamp', # 0x187 'NtGdiDdDDISetProcessSchedulingPriorityClass', # 0x188 'NtGdiDdDDISetQueuedLimit', # 0x189 'NtGdiDdDDISetVidPnSourceOwner', # 0x18a 'NtGdiDdDDISharedPrimaryLockNotification', # 0x18b 'NtGdiDdDDISharedPrimaryUnLockNotification', # 0x18c 'NtGdiDdDDISignalSynchronizationObject', # 0x18d 'NtGdiDdDDIUnlock', # 0x18e 'NtGdiDdDDIUpdateOverlay', # 0x18f 'NtGdiDdDDIWaitForIdle', # 0x190 'NtGdiDdDDIWaitForSynchronizationObject', # 0x191 'NtGdiDdDDIWaitForVerticalBlankEvent', # 0x192 'NtGdiDdDeleteDirectDrawObject', # 0x193 'NtGdiDdDestroyD3DBuffer', # 0x194 'NtGdiDdDestroyMoComp', # 0x195 'NtGdiDdEndMoCompFrame', # 0x196 'NtGdiDdFlip', # 0x197 'NtGdiDdFlipToGDISurface', # 0x198 'NtGdiDdGetAvailDriverMemory', # 0x199 'NtGdiDdGetBltStatus', # 0x19a 'NtGdiDdGetDC', # 0x19b 'NtGdiDdGetDriverInfo', # 0x19c 'NtGdiDdGetDriverState', # 0x19d 'NtGdiDdGetDxHandle', # 0x19e 'NtGdiDdGetFlipStatus', # 0x19f 'NtGdiDdGetInternalMoCompInfo', # 0x1a0 'NtGdiDdGetMoCompBuffInfo', # 0x1a1 'NtGdiDdGetMoCompFormats', # 0x1a2 'NtGdiDdGetMoCompGuids', # 0x1a3 'NtGdiDdGetScanLine', # 0x1a4 'NtGdiDdLock', # 0x1a5 'NtGdiDdQueryDirectDrawObject', # 0x1a6 'NtGdiDdQueryMoCompStatus', # 0x1a7 'NtGdiDdReenableDirectDrawObject', # 0x1a8 'NtGdiDdReleaseDC', # 0x1a9 'NtGdiDdRenderMoComp', # 0x1aa 'NtGdiDdSetColorKey', # 0x1ab 'NtGdiDdSetExclusiveMode', # 0x1ac 'NtGdiDdSetGammaRamp', # 0x1ad 'NtGdiDdSetOverlayPosition', # 0x1ae 'NtGdiDdUnattachSurface', # 0x1af 'NtGdiDdUnlock', # 0x1b0 'NtGdiDdUpdateOverlay', # 0x1b1 'NtGdiDdWaitForVerticalBlank', # 0x1b2 'NtGdiDeleteColorTransform', # 0x1b3 'NtGdiDescribePixelFormat', # 0x1b4 'NtGdiDestroyOPMProtectedOutput', # 0x1b5 'NtGdiDestroyPhysicalMonitor', # 0x1b6 'NtGdiDoBanding', # 0x1b7 'NtGdiDrawEscape', # 0x1b8 'NtGdiDvpAcquireNotification', # 0x1b9 'NtGdiDvpCanCreateVideoPort', # 0x1ba 'NtGdiDvpColorControl', # 0x1bb 'NtGdiDvpCreateVideoPort', # 0x1bc 'NtGdiDvpDestroyVideoPort', # 0x1bd 'NtGdiDvpFlipVideoPort', # 0x1be 'NtGdiDvpGetVideoPortBandwidth', # 0x1bf 'NtGdiDvpGetVideoPortConnectInfo', # 0x1c0 'NtGdiDvpGetVideoPortField', # 0x1c1 'NtGdiDvpGetVideoPortFlipStatus', # 0x1c2 'NtGdiDvpGetVideoPortInputFormats', # 0x1c3 'NtGdiDvpGetVideoPortLine', # 0x1c4 'NtGdiDvpGetVideoPortOutputFormats', # 0x1c5 'NtGdiDvpGetVideoSignalStatus', # 0x1c6 'NtGdiDvpReleaseNotification', # 0x1c7 'NtGdiDvpUpdateVideoPort', # 0x1c8 'NtGdiDvpWaitForVideoPortSync', # 0x1c9 'NtGdiDwmGetDirtyRgn', # 0x1ca 'NtGdiDwmGetSurfaceData', # 0x1cb 'NtGdiDxgGenericThunk', # 0x1cc 'NtGdiEllipse', # 0x1cd 'NtGdiEnableEudc', # 0x1ce 'NtGdiEndDoc', # 0x1cf 'NtGdiEndPage', # 0x1d0 'NtGdiEngAlphaBlend', # 0x1d1 'NtGdiEngAssociateSurface', # 0x1d2 
'NtGdiEngBitBlt', # 0x1d3 'NtGdiEngCheckAbort', # 0x1d4 'NtGdiEngComputeGlyphSet', # 0x1d5 'NtGdiEngCopyBits', # 0x1d6 'NtGdiEngCreateBitmap', # 0x1d7 'NtGdiEngCreateClip', # 0x1d8 'NtGdiEngCreateDeviceBitmap', # 0x1d9 'NtGdiEngCreateDeviceSurface', # 0x1da 'NtGdiEngCreatePalette', # 0x1db 'NtGdiEngDeleteClip', # 0x1dc 'NtGdiEngDeletePalette', # 0x1dd 'NtGdiEngDeletePath', # 0x1de 'NtGdiEngDeleteSurface', # 0x1df 'NtGdiEngEraseSurface', # 0x1e0 'NtGdiEngFillPath', # 0x1e1 'NtGdiEngGradientFill', # 0x1e2 'NtGdiEngLineTo', # 0x1e3 'NtGdiEngLockSurface', # 0x1e4 'NtGdiEngMarkBandingSurface', # 0x1e5 'NtGdiEngPaint', # 0x1e6 'NtGdiEngPlgBlt', # 0x1e7 'NtGdiEngStretchBlt', # 0x1e8 'NtGdiEngStretchBltROP', # 0x1e9 'NtGdiEngStrokeAndFillPath', # 0x1ea 'NtGdiEngStrokePath', # 0x1eb 'NtGdiEngTextOut', # 0x1ec 'NtGdiEngTransparentBlt', # 0x1ed 'NtGdiEngUnlockSurface', # 0x1ee 'NtGdiEnumObjects', # 0x1ef 'NtGdiEudcLoadUnloadLink', # 0x1f0 'NtGdiExtFloodFill', # 0x1f1 'NtGdiFONTOBJ_cGetAllGlyphHandles', # 0x1f2 'NtGdiFONTOBJ_cGetGlyphs', # 0x1f3 'NtGdiFONTOBJ_pQueryGlyphAttrs', # 0x1f4 'NtGdiFONTOBJ_pfdg', # 0x1f5 'NtGdiFONTOBJ_pifi', # 0x1f6 'NtGdiFONTOBJ_pvTrueTypeFontFile', # 0x1f7 'NtGdiFONTOBJ_pxoGetXform', # 0x1f8 'NtGdiFONTOBJ_vGetInfo', # 0x1f9 'NtGdiFlattenPath', # 0x1fa 'NtGdiFontIsLinked', # 0x1fb 'NtGdiForceUFIMapping', # 0x1fc 'NtGdiFrameRgn', # 0x1fd 'NtGdiFullscreenControl', # 0x1fe 'NtGdiGetBoundsRect', # 0x1ff 'NtGdiGetCOPPCompatibleOPMInformation', # 0x200 'NtGdiGetCertificate', # 0x201 'NtGdiGetCertificateSize', # 0x202 'NtGdiGetCharABCWidthsW', # 0x203 'NtGdiGetCharacterPlacementW', # 0x204 'NtGdiGetColorAdjustment', # 0x205 'NtGdiGetColorSpaceforBitmap', # 0x206 'NtGdiGetDeviceCaps', # 0x207 'NtGdiGetDeviceCapsAll', # 0x208 'NtGdiGetDeviceGammaRamp', # 0x209 'NtGdiGetDeviceWidth', # 0x20a 'NtGdiGetDhpdev', # 0x20b 'NtGdiGetETM', # 0x20c 'NtGdiGetEmbUFI', # 0x20d 'NtGdiGetEmbedFonts', # 0x20e 'NtGdiGetEudcTimeStampEx', # 0x20f 'NtGdiGetFontResourceInfoInternalW', # 0x210 'NtGdiGetFontUnicodeRanges', # 0x211 'NtGdiGetGlyphIndicesW', # 0x212 'NtGdiGetGlyphIndicesWInternal', # 0x213 'NtGdiGetGlyphOutline', # 0x214 'NtGdiGetKerningPairs', # 0x215 'NtGdiGetLinkedUFIs', # 0x216 'NtGdiGetMiterLimit', # 0x217 'NtGdiGetMonitorID', # 0x218 'NtGdiGetNumberOfPhysicalMonitors', # 0x219 'NtGdiGetOPMInformation', # 0x21a 'NtGdiGetOPMRandomNumber', # 0x21b 'NtGdiGetObjectBitmapHandle', # 0x21c 'NtGdiGetPath', # 0x21d 'NtGdiGetPerBandInfo', # 0x21e 'NtGdiGetPhysicalMonitorDescription', # 0x21f 'NtGdiGetPhysicalMonitors', # 0x220 'NtGdiGetRealizationInfo', # 0x221 'NtGdiGetServerMetaFileBits', # 0x222 'NtGdiGetSpoolMessage', # 0x223 'NtGdiGetStats', # 0x224 'NtGdiGetStringBitmapW', # 0x225 'NtGdiGetSuggestedOPMProtectedOutputArraySize', # 0x226 'NtGdiGetTextExtentExW', # 0x227 'NtGdiGetUFI', # 0x228 'NtGdiGetUFIPathname', # 0x229 'NtGdiGradientFill', # 0x22a 'NtGdiHT_Get8BPPFormatPalette', # 0x22b 'NtGdiHT_Get8BPPMaskPalette', # 0x22c 'NtGdiIcmBrushInfo', # 0x22d 'NtGdiInit', # 0x22e 'NtGdiInitSpool', # 0x22f 'NtGdiMakeFontDir', # 0x230 'NtGdiMakeInfoDC', # 0x231 'NtGdiMakeObjectUnXferable', # 0x232 'NtGdiMakeObjectXferable', # 0x233 'NtGdiMirrorWindowOrg', # 0x234 'NtGdiMonoBitmap', # 0x235 'NtGdiMoveTo', # 0x236 'NtGdiOffsetClipRgn', # 0x237 'NtGdiPATHOBJ_bEnum', # 0x238 'NtGdiPATHOBJ_bEnumClipLines', # 0x239 'NtGdiPATHOBJ_vEnumStart', # 0x23a 'NtGdiPATHOBJ_vEnumStartClipLines', # 0x23b 'NtGdiPATHOBJ_vGetBounds', # 0x23c 'NtGdiPathToRegion', # 0x23d 'NtGdiPlgBlt', # 0x23e 'NtGdiPolyDraw', # 0x23f 
'NtGdiPolyTextOutW', # 0x240 'NtGdiPtInRegion', # 0x241 'NtGdiPtVisible', # 0x242 'NtGdiQueryFonts', # 0x243 'NtGdiRemoveFontResourceW', # 0x244 'NtGdiRemoveMergeFont', # 0x245 'NtGdiResetDC', # 0x246 'NtGdiResizePalette', # 0x247 'NtGdiRoundRect', # 0x248 'NtGdiSTROBJ_bEnum', # 0x249 'NtGdiSTROBJ_bEnumPositionsOnly', # 0x24a 'NtGdiSTROBJ_bGetAdvanceWidths', # 0x24b 'NtGdiSTROBJ_dwGetCodePage', # 0x24c 'NtGdiSTROBJ_vEnumStart', # 0x24d 'NtGdiScaleViewportExtEx', # 0x24e 'NtGdiScaleWindowExtEx', # 0x24f 'NtGdiSelectBrush', # 0x250 'NtGdiSelectClipPath', # 0x251 'NtGdiSelectPen', # 0x252 'NtGdiSetBitmapAttributes', # 0x253 'NtGdiSetBrushAttributes', # 0x254 'NtGdiSetColorAdjustment', # 0x255 'NtGdiSetColorSpace', # 0x256 'NtGdiSetDeviceGammaRamp', # 0x257 'NtGdiSetFontXform', # 0x258 'NtGdiSetIcmMode', # 0x259 'NtGdiSetLinkedUFIs', # 0x25a 'NtGdiSetMagicColors', # 0x25b 'NtGdiSetOPMSigningKeyAndSequenceNumbers', # 0x25c 'NtGdiSetPUMPDOBJ', # 0x25d 'NtGdiSetPixelFormat', # 0x25e 'NtGdiSetRectRgn', # 0x25f 'NtGdiSetSizeDevice', # 0x260 'NtGdiSetSystemPaletteUse', # 0x261 'NtGdiSetTextJustification', # 0x262 'NtGdiStartDoc', # 0x263 'NtGdiStartPage', # 0x264 'NtGdiStrokeAndFillPath', # 0x265 'NtGdiStrokePath', # 0x266 'NtGdiSwapBuffers', # 0x267 'NtGdiTransparentBlt', # 0x268 'NtGdiUMPDEngFreeUserMem', # 0x269 'NtGdiUnloadPrinterDriver', # 0x26a 'NtGdiUnmapMemFont', # 0x26b 'NtGdiUpdateColors', # 0x26c 'NtGdiUpdateTransform', # 0x26d 'NtGdiWidenPath', # 0x26e 'NtGdiXFORMOBJ_bApplyXform', # 0x26f 'NtGdiXFORMOBJ_iGetXform', # 0x270 'NtGdiXLATEOBJ_cGetPalette', # 0x271 'NtGdiXLATEOBJ_hGetColorTransform', # 0x272 'NtGdiXLATEOBJ_iXlate', # 0x273 'NtUserAddClipboardFormatListener', # 0x274 'NtUserAssociateInputContext', # 0x275 'NtUserBlockInput', # 0x276 'NtUserBuildHimcList', # 0x277 'NtUserBuildPropList', # 0x278 'NtUserCallHwndOpt', # 0x279 'NtUserChangeDisplaySettings', # 0x27a 'NtUserCheckAccessForIntegrityLevel', # 0x27b 'NtUserCheckDesktopByThreadId', # 0x27c 'NtUserCheckWindowThreadDesktop', # 0x27d 'NtUserChildWindowFromPointEx', # 0x27e 'NtUserClipCursor', # 0x27f 'NtUserCreateDesktopEx', # 0x280 'NtUserCreateInputContext', # 0x281 'NtUserCreateWindowStation', # 0x282 'NtUserCtxDisplayIOCtl', # 0x283 'NtUserDestroyInputContext', # 0x284 'NtUserDisableThreadIme', # 0x285 'NtUserDoSoundConnect', # 0x286 'NtUserDoSoundDisconnect', # 0x287 'NtUserDragDetect', # 0x288 'NtUserDragObject', # 0x289 'NtUserDrawAnimatedRects', # 0x28a 'NtUserDrawCaption', # 0x28b 'NtUserDrawCaptionTemp', # 0x28c 'NtUserDrawMenuBarTemp', # 0x28d 'NtUserDwmGetDxRgn', # 0x28e 'NtUserDwmHintDxUpdate', # 0x28f 'NtUserDwmStartRedirection', # 0x290 'NtUserDwmStopRedirection', # 0x291 'NtUserEndMenu', # 0x292 'NtUserEvent', # 0x293 'NtUserFlashWindowEx', # 0x294 'NtUserFrostCrashedWindow', # 0x295 'NtUserGetAppImeLevel', # 0x296 'NtUserGetCaretPos', # 0x297 'NtUserGetClipCursor', # 0x298 'NtUserGetClipboardViewer', # 0x299 'NtUserGetComboBoxInfo', # 0x29a 'NtUserGetCursorInfo', # 0x29b 'NtUserGetGuiResources', # 0x29c 'NtUserGetImeHotKey', # 0x29d 'NtUserGetImeInfoEx', # 0x29e 'NtUserGetInternalWindowPos', # 0x29f 'NtUserGetKeyNameText', # 0x2a0 'NtUserGetKeyboardLayoutName', # 0x2a1 'NtUserGetLayeredWindowAttributes', # 0x2a2 'NtUserGetListBoxInfo', # 0x2a3 'NtUserGetMenuIndex', # 0x2a4 'NtUserGetMenuItemRect', # 0x2a5 'NtUserGetMouseMovePointsEx', # 0x2a6 'NtUserGetPriorityClipboardFormat', # 0x2a7 'NtUserGetRawInputBuffer', # 0x2a8 'NtUserGetRawInputData', # 0x2a9 'NtUserGetRawInputDeviceInfo', # 0x2aa 
'NtUserGetRawInputDeviceList', # 0x2ab 'NtUserGetRegisteredRawInputDevices', # 0x2ac 'NtUserGetUpdatedClipboardFormats', # 0x2ad 'NtUserGetWOWClass', # 0x2ae 'NtUserGetWindowMinimizeRect', # 0x2af 'NtUserGetWindowRgnEx', # 0x2b0 'NtUserGhostWindowFromHungWindow', # 0x2b1 'NtUserHardErrorControl', # 0x2b2 'NtUserHiliteMenuItem', # 0x2b3 'NtUserHungWindowFromGhostWindow', # 0x2b4 'NtUserImpersonateDdeClientWindow', # 0x2b5 'NtUserInitTask', # 0x2b6 'NtUserInitialize', # 0x2b7 'NtUserInitializeClientPfnArrays', # 0x2b8 'NtUserInternalGetWindowIcon', # 0x2b9 'NtUserLoadKeyboardLayoutEx', # 0x2ba 'NtUserLockWindowStation', # 0x2bb 'NtUserLockWorkStation', # 0x2bc 'NtUserLogicalToPhysicalPoint', # 0x2bd 'NtUserMNDragLeave', # 0x2be 'NtUserMNDragOver', # 0x2bf 'NtUserMenuItemFromPoint', # 0x2c0 'NtUserMinMaximize', # 0x2c1 'NtUserNotifyIMEStatus', # 0x2c2 'NtUserOpenInputDesktop', # 0x2c3 'NtUserOpenThreadDesktop', # 0x2c4 'NtUserPaintMonitor', # 0x2c5 'NtUserPhysicalToLogicalPoint', # 0x2c6 'NtUserPrintWindow', # 0x2c7 'NtUserQueryInformationThread', # 0x2c8 'NtUserQueryInputContext', # 0x2c9 'NtUserQuerySendMessage', # 0x2ca 'NtUserRealChildWindowFromPoint', # 0x2cb 'NtUserRealWaitMessageEx', # 0x2cc 'NtUserRegisterErrorReportingDialog', # 0x2cd 'NtUserRegisterHotKey', # 0x2ce 'NtUserRegisterRawInputDevices', # 0x2cf 'NtUserRegisterSessionPort', # 0x2d0 'NtUserRegisterTasklist', # 0x2d1 'NtUserRegisterUserApiHook', # 0x2d2 'NtUserRemoteConnect', # 0x2d3 'NtUserRemoteRedrawRectangle', # 0x2d4 'NtUserRemoteRedrawScreen', # 0x2d5 'NtUserRemoteStopScreenUpdates', # 0x2d6 'NtUserRemoveClipboardFormatListener', # 0x2d7 'NtUserResolveDesktopForWOW', # 0x2d8 'NtUserSetAppImeLevel', # 0x2d9 'NtUserSetClassWord', # 0x2da 'NtUserSetCursorContents', # 0x2db 'NtUserSetImeHotKey', # 0x2dc 'NtUserSetImeInfoEx', # 0x2dd 'NtUserSetImeOwnerWindow', # 0x2de 'NtUserSetInternalWindowPos', # 0x2df 'NtUserSetLayeredWindowAttributes', # 0x2e0 'NtUserSetMenu', # 0x2e1 'NtUserSetMenuContextHelpId', # 0x2e2 'NtUserSetMenuFlagRtoL', # 0x2e3 'NtUserSetMirrorRendering', # 0x2e4 'NtUserSetObjectInformation', # 0x2e5 'NtUserSetProcessDPIAware', # 0x2e6 'NtUserSetShellWindowEx', # 0x2e7 'NtUserSetSysColors', # 0x2e8 'NtUserSetSystemCursor', # 0x2e9 'NtUserSetSystemTimer', # 0x2ea 'NtUserSetThreadLayoutHandles', # 0x2eb 'NtUserSetWindowRgnEx', # 0x2ec 'NtUserSetWindowStationUser', # 0x2ed 'NtUserShowSystemCursor', # 0x2ee 'NtUserSoundSentry', # 0x2ef 'NtUserSwitchDesktop', # 0x2f0 'NtUserTestForInteractiveUser', # 0x2f1 'NtUserTrackPopupMenuEx', # 0x2f2 'NtUserUnloadKeyboardLayout', # 0x2f3 'NtUserUnlockWindowStation', # 0x2f4 'NtUserUnregisterHotKey', # 0x2f5 'NtUserUnregisterSessionPort', # 0x2f6 'NtUserUnregisterUserApiHook', # 0x2f7 'NtUserUpdateInputContext', # 0x2f8 'NtUserUpdateInstance', # 0x2f9 'NtUserUpdateLayeredWindow', # 0x2fa 'NtUserUpdatePerUserSystemParameters', # 0x2fb 'NtUserUpdateWindowTransform', # 0x2fc 'NtUserUserHandleGrantAccess', # 0x2fd 'NtUserValidateHandleSecure', # 0x2fe 'NtUserWaitForInputIdle', # 0x2ff 'NtUserWaitForMsgAndEvent', # 0x300 'NtUserWin32PoolAllocationStats', # 0x301 'NtUserWindowFromPhysicalPoint', # 0x302 'NtUserYieldTask', # 0x303 'NtUserSetClassLongPtr', # 0x304 'NtUserSetWindowLongPtr', # 0x305 ], ]
Cisco-Talos/pyrebox
volatility/volatility/plugins/overlays/windows/vista_sp12_x64_syscalls.py
Python
gpl-2.0
43,395
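A brief sketch of how a table like the one above is typically consumed. It assumes the nested lists are bound to a module-level name (called `syscall_tables` here purely for illustration); on x64 Windows the upper bits of a service number select the table (0 for the Nt* ntoskrnl table, 1 for the NtUser*/NtGdi* win32k shadow table) and the low 12 bits index into it.

def resolve_syscall(syscall_tables, service_number):
    """Map a raw system service number to its name using tables shaped like the ones above."""
    table_index = (service_number >> 12) & 0xF   # 0 -> Nt* table, 1 -> NtUser*/NtGdi* table
    call_index = service_number & 0xFFF          # low 12 bits select the entry in that table
    return syscall_tables[table_index][call_index]

# For example, with the lists above: 0x5f -> 'NtSetTimer', 0x1008 -> 'NtGdiBitBlt'.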
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for rmsprop optimizer.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import math from absl.testing import parameterized import numpy as np from tensorflow.contrib.optimizer_v2 import rmsprop from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import embedding_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test _DATA_TYPES = [dtypes.half, dtypes.float32] _TEST_PARAM_VALUES = [ # learning_rate, decay, momentum, epsilon, centered, use_resource [0.5, 0.9, 0.0, 1.0, True, False], [0.5, 0.9, 0.0, 1.0, False, False], [0.5, 0.9, 0.0, 1.0, True, True], [0.5, 0.9, 0.0, 1.0, False, True], [0.1, 0.9, 0.0, 1.0, True, False], [0.5, 0.95, 0.0, 1.0, False, False], [0.5, 0.8, 0.0, 1e-3, True, False], [0.5, 0.8, 0.9, 1e-3, True, False], ] class RMSPropOptimizerTest(test.TestCase, parameterized.TestCase): def _rmsprop_update_numpy(self, var, g, mg, rms, mom, lr, decay, momentum, centered): rms_t = rms * decay + (1 - decay) * g * g if centered: mg_t = mg * decay + (1 - decay) * g denom_t = rms_t - mg_t * mg_t else: mg_t = mg denom_t = rms_t mom_t = momentum * mom + lr * g / np.sqrt(denom_t, dtype=denom_t.dtype) var_t = var - mom_t return var_t, mg_t, rms_t, mom_t def _sparse_rmsprop_update_numpy(self, var, gindexs, gvalues, mg, rms, mom, lr, decay, momentum, centered): mg_t = copy.deepcopy(mg) rms_t = copy.deepcopy(rms) mom_t = copy.deepcopy(mom) var_t = copy.deepcopy(var) for i in range(len(gindexs)): gindex = gindexs[i] gvalue = gvalues[i] rms_t[gindex] = rms[gindex] * decay + (1 - decay) * gvalue * gvalue denom_t = rms_t[gindex] if centered: mg_t[gindex] = mg_t[gindex] * decay + (1 - decay) * gvalue denom_t -= mg_t[gindex] * mg_t[gindex] mom_t[gindex] = momentum * mom[gindex] + lr * gvalue / np.sqrt(denom_t) var_t[gindex] = var[gindex] - mom_t[gindex] return var_t, mg_t, rms_t, mom_t @parameterized.named_parameters( *test_util.generate_combinations_with_testcase_name( dtype=_DATA_TYPES, param_value=_TEST_PARAM_VALUES)) def testDense(self, dtype, param_value): (learning_rate, decay, momentum, epsilon, centered, use_resource) = tuple( param_value) with self.session(use_gpu=True): # Initialize variables for numpy implementation. 
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype) grads0_np = np.array([0.1, 0.2], dtype=dtype.as_numpy_dtype) var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype) grads1_np = np.array([0.01, 0.2], dtype=dtype.as_numpy_dtype) if use_resource: var0 = resource_variable_ops.ResourceVariable(var0_np) var1 = resource_variable_ops.ResourceVariable(var1_np) else: var0 = variables.Variable(var0_np) var1 = variables.Variable(var1_np) grads0 = constant_op.constant(grads0_np) grads1 = constant_op.constant(grads1_np) opt = rmsprop.RMSPropOptimizer( learning_rate=learning_rate, decay=decay, momentum=momentum, epsilon=epsilon, centered=centered) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() mg0 = opt.get_slot(var0, "mg") self.assertEqual(mg0 is not None, centered) mg1 = opt.get_slot(var1, "mg") self.assertEqual(mg1 is not None, centered) rms0 = opt.get_slot(var0, "rms") self.assertIsNotNone(rms0) rms1 = opt.get_slot(var1, "rms") self.assertIsNotNone(rms1) mom0 = opt.get_slot(var0, "momentum") self.assertIsNotNone(mom0) mom1 = opt.get_slot(var1, "momentum") self.assertIsNotNone(mom1) mg0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) mg1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) rms0_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype) rms1_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype) mom0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) mom1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], var0.eval()) self.assertAllClose([3.0, 4.0], var1.eval()) # Run 4 steps of RMSProp for _ in range(4): update.run() var0_np, mg0_np, rms0_np, mom0_np = self._rmsprop_update_numpy( var0_np, grads0_np, mg0_np, rms0_np, mom0_np, learning_rate, decay, momentum, centered) var1_np, mg1_np, rms1_np, mom1_np = self._rmsprop_update_numpy( var1_np, grads1_np, mg1_np, rms1_np, mom1_np, learning_rate, decay, momentum, centered) # Validate updated params if centered: self.assertAllCloseAccordingToType(mg0_np, mg0.eval()) self.assertAllCloseAccordingToType(mg1_np, mg1.eval()) self.assertAllCloseAccordingToType(rms0_np, rms0.eval()) self.assertAllCloseAccordingToType(rms1_np, rms1.eval()) self.assertAllCloseAccordingToType(mom0_np, mom0.eval()) self.assertAllCloseAccordingToType(mom1_np, mom1.eval()) # TODO(b/117393988): Reduce tolerances for float16. 
self.assertAllCloseAccordingToType( var0_np, var0.eval(), half_rtol=3e-3, half_atol=3e-3) self.assertAllCloseAccordingToType( var1_np, var1.eval(), half_rtol=3e-3, half_atol=3e-3) @parameterized.parameters([dtypes.float32, dtypes.float64]) def testMinimizeSparseResourceVariable(self, dtype): with self.cached_session(): var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype) x = constant_op.constant([[4.0], [5.0]], dtype=dtype) pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) loss = pred * pred sgd_op = rmsprop.RMSPropOptimizer( learning_rate=1.0, decay=0.0, momentum=0.0, epsilon=0.0, centered=False).minimize(loss) variables.global_variables_initializer().run() # Fetch params to validate initial values self.assertAllCloseAccordingToType([[1.0, 2.0]], var0.eval()) # Run 1 step of sgd sgd_op.run() # Validate updated params self.assertAllCloseAccordingToType( [[0., 1.]], var0.eval(), atol=0.01) @parameterized.parameters([dtypes.float32, dtypes.float64]) def testMinimizeSparseResourceVariableCentered(self, dtype): with self.cached_session(): var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype) x = constant_op.constant([[4.0], [5.0]], dtype=dtype) pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) loss = pred * pred sgd_op = rmsprop.RMSPropOptimizer( learning_rate=1.0, decay=0.1, momentum=0.0, epsilon=1.0, centered=True).minimize(loss) variables.global_variables_initializer().run() # Fetch params to validate initial values self.assertAllCloseAccordingToType([[1.0, 2.0]], var0.eval()) # Run 1 step of sgd sgd_op.run() # Validate updated params self.assertAllCloseAccordingToType( [[-7/3.0, -4/3.0]], var0.eval(), atol=0.01) @parameterized.named_parameters( *test_util.generate_combinations_with_testcase_name( dtype=_DATA_TYPES, param_value=_TEST_PARAM_VALUES)) def testSparse(self, dtype, param_value): (learning_rate, decay, momentum, epsilon, centered, _) = tuple( param_value) with self.session(use_gpu=True): # Initialize variables for numpy implementation. 
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype) grads0_np = np.array([0.1], dtype=dtype.as_numpy_dtype) var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype) grads1_np = np.array([0.01], dtype=dtype.as_numpy_dtype) var0 = variables.Variable(var0_np) var1 = variables.Variable(var1_np) grads0_np_indices = np.array([0], dtype=np.int32) grads0 = ops.IndexedSlices( constant_op.constant(grads0_np), constant_op.constant(grads0_np_indices), constant_op.constant([1])) grads1_np_indices = np.array([1], dtype=np.int32) grads1 = ops.IndexedSlices( constant_op.constant(grads1_np), constant_op.constant(grads1_np_indices), constant_op.constant([1])) opt = rmsprop.RMSPropOptimizer( learning_rate=learning_rate, decay=decay, momentum=momentum, epsilon=epsilon, centered=centered) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() mg0 = opt.get_slot(var0, "mg") self.assertEqual(mg0 is not None, centered) mg1 = opt.get_slot(var1, "mg") self.assertEqual(mg1 is not None, centered) rms0 = opt.get_slot(var0, "rms") self.assertIsNotNone(rms0) rms1 = opt.get_slot(var1, "rms") self.assertIsNotNone(rms1) mom0 = opt.get_slot(var0, "momentum") self.assertIsNotNone(mom0) mom1 = opt.get_slot(var1, "momentum") self.assertIsNotNone(mom1) mg0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) mg1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) rms0_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype) rms1_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype) mom0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) mom1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], var0.eval()) self.assertAllClose([3.0, 4.0], var1.eval()) # Run 4 steps of RMSProp for _ in range(4): update.run() var0_np, mg0_np, rms0_np, mom0_np = self._sparse_rmsprop_update_numpy( var0_np, grads0_np_indices, grads0_np, mg0_np, rms0_np, mom0_np, learning_rate, decay, momentum, centered) var1_np, mg1_np, rms1_np, mom1_np = self._sparse_rmsprop_update_numpy( var1_np, grads1_np_indices, grads1_np, mg1_np, rms1_np, mom1_np, learning_rate, decay, momentum, centered) # Validate updated params if centered: self.assertAllCloseAccordingToType(mg0_np, mg0.eval()) self.assertAllCloseAccordingToType(mg1_np, mg1.eval()) self.assertAllCloseAccordingToType(rms0_np, rms0.eval()) self.assertAllCloseAccordingToType(rms1_np, rms1.eval()) self.assertAllCloseAccordingToType(mom0_np, mom0.eval()) self.assertAllCloseAccordingToType(mom1_np, mom1.eval()) self.assertAllCloseAccordingToType(var0_np, var0.eval()) self.assertAllCloseAccordingToType(var1_np, var1.eval()) @parameterized.parameters(_DATA_TYPES) def testWithoutMomentum(self, dtype): with self.session(use_gpu=True): var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([3.0, 4.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) opt = rmsprop.RMSPropOptimizer( learning_rate=2.0, decay=0.9, momentum=0.0, epsilon=1.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() rms0 = opt.get_slot(var0, "rms") self.assertIsNotNone(rms0) rms1 = opt.get_slot(var1, "rms") self.assertIsNotNone(rms1) mom0 = opt.get_slot(var0, "momentum") self.assertIsNotNone(mom0) mom1 = opt.get_slot(var1, "momentum") self.assertIsNotNone(mom1) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], 
var0.eval()) self.assertAllClose([3.0, 4.0], var1.eval()) # Step 1: the rms accumulators where 1. So we should see a normal # update: v -= grad * learning_rate update.run() # Check the root mean square accumulators. self.assertAllCloseAccordingToType( np.array([0.901, 0.901]), rms0.eval()) self.assertAllCloseAccordingToType( np.array([0.90001, 0.90001]), rms1.eval()) # Check the parameters. self.assertAllCloseAccordingToType( np.array([ 1.0 - (0.1 * 2.0 / math.sqrt(0.901)), 2.0 - (0.1 * 2.0 / math.sqrt(0.901)) ]), var0.eval()) self.assertAllCloseAccordingToType( np.array([ 3.0 - (0.01 * 2.0 / math.sqrt(0.90001)), 4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) ]), var1.eval()) # Step 2: the root mean square accumulators contain the previous update. update.run() # Check the rms accumulators. self.assertAllCloseAccordingToType( np.array([0.901 * 0.9 + 0.001, 0.901 * 0.9 + 0.001]), rms0.eval()) self.assertAllCloseAccordingToType( np.array([0.90001 * 0.9 + 1e-5, 0.90001 * 0.9 + 1e-5]), rms1.eval()) # Check the parameters. self.assertAllCloseAccordingToType( np.array([ 1.0 - (0.1 * 2.0 / math.sqrt(0.901)) - (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)), 2.0 - (0.1 * 2.0 / math.sqrt(0.901)) - (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)) ]), var0.eval()) self.assertAllCloseAccordingToType( np.array([ 3.0 - (0.01 * 2.0 / math.sqrt(0.90001)) - (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)), 4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) - (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)) ]), var1.eval()) @parameterized.parameters(_DATA_TYPES) def testWithMomentum(self, dtype): with self.session(use_gpu=True): var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([3.0, 4.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.1], dtype=dtype) grads1 = constant_op.constant([0.01, 0.01], dtype=dtype) opt = rmsprop.RMSPropOptimizer( learning_rate=2.0, decay=0.9, momentum=0.5, epsilon=1.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() rms0 = opt.get_slot(var0, "rms") self.assertIsNotNone(rms0) rms1 = opt.get_slot(var1, "rms") self.assertIsNotNone(rms1) mom0 = opt.get_slot(var0, "momentum") self.assertIsNotNone(mom0) mom1 = opt.get_slot(var1, "momentum") self.assertIsNotNone(mom1) # Fetch params to validate initial values self.assertAllClose([1.0, 2.0], var0.eval()) self.assertAllClose([3.0, 4.0], var1.eval()) # Step 1: rms = 1, mom = 0. So we should see a normal # update: v -= grad * learning_rate update.run() # Check the root mean square accumulators. self.assertAllCloseAccordingToType( np.array([0.901, 0.901]), rms0.eval()) self.assertAllCloseAccordingToType( np.array([0.90001, 0.90001]), rms1.eval()) # Check the momentum accumulators self.assertAllCloseAccordingToType( np.array([(0.1 * 2.0 / math.sqrt(0.901)), (0.1 * 2.0 / math.sqrt(0.901))]), mom0.eval()) self.assertAllCloseAccordingToType( np.array([(0.01 * 2.0 / math.sqrt(0.90001)), (0.01 * 2.0 / math.sqrt(0.90001))]), mom1.eval()) # Check that the parameters. self.assertAllCloseAccordingToType( np.array([ 1.0 - (0.1 * 2.0 / math.sqrt(0.901)), 2.0 - (0.1 * 2.0 / math.sqrt(0.901)) ]), var0.eval()) self.assertAllCloseAccordingToType( np.array([ 3.0 - (0.01 * 2.0 / math.sqrt(0.90001)), 4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) ]), var1.eval()) # Step 2: the root mean square accumulators contain the previous update. update.run() # Check the rms accumulators. 
self.assertAllCloseAccordingToType( np.array([0.901 * 0.9 + 0.001, 0.901 * 0.9 + 0.001]), rms0.eval()) self.assertAllCloseAccordingToType( np.array([0.90001 * 0.9 + 1e-5, 0.90001 * 0.9 + 1e-5]), rms1.eval()) self.assertAllCloseAccordingToType( np.array([ 0.5 * (0.1 * 2.0 / math.sqrt(0.901)) + (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)), 0.5 * (0.1 * 2.0 / math.sqrt(0.901)) + (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)) ]), mom0.eval()) self.assertAllCloseAccordingToType( np.array([ 0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) + (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)), 0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) + (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)) ]), mom1.eval()) # Check the parameters. self.assertAllCloseAccordingToType( np.array([ 1.0 - (0.1 * 2.0 / math.sqrt(0.901)) - (0.5 * (0.1 * 2.0 / math.sqrt(0.901)) + (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001))), 2.0 - (0.1 * 2.0 / math.sqrt(0.901)) - (0.5 * (0.1 * 2.0 / math.sqrt(0.901)) + (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001))) ]), var0.eval()) self.assertAllCloseAccordingToType( np.array([ 3.0 - (0.01 * 2.0 / math.sqrt(0.90001)) - (0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) + (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5))), 4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) - (0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) + (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5))) ]), var1.eval()) if __name__ == "__main__": test.main()
hfp/tensorflow-xsmm
tensorflow/contrib/optimizer_v2/rmsprop_test.py
Python
apache-2.0
19,033
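For reference, a minimal standalone sketch (not part of the test file) of the non-centered update that `_rmsprop_update_numpy` implements, seeding the accumulator with epsilon exactly as the tests do; the printed values reproduce the first step checked in `testWithoutMomentum` (learning_rate=2.0, decay=0.9, momentum=0.0, epsilon=1.0).

import numpy as np

def rmsprop_step(var, grad, rms, mom, lr, decay, momentum):
    # rms keeps a decayed average of squared gradients; mom carries the momentum term.
    rms = decay * rms + (1.0 - decay) * grad * grad
    mom = momentum * mom + lr * grad / np.sqrt(rms)
    return var - mom, rms, mom

var = np.array([1.0, 2.0])
rms = np.array([1.0, 1.0])   # seeded with epsilon, matching rms0_np in the tests
mom = np.zeros(2)
var, rms, mom = rmsprop_step(var, np.array([0.1, 0.1]), rms, mom,
                             lr=2.0, decay=0.9, momentum=0.0)
print(rms)  # [0.901 0.901]
print(var)  # [1.0 - 0.1 * 2.0 / sqrt(0.901), 2.0 - 0.1 * 2.0 / sqrt(0.901)]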
##########################################################################
#
#  Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
#  Redistribution and use in source and binary forms, with or without
#  modification, are permitted provided that the following conditions are
#  met:
#
#      * Redistributions of source code must retain the above
#        copyright notice, this list of conditions and the following
#        disclaimer.
#
#      * Redistributions in binary form must reproduce the above
#        copyright notice, this list of conditions and the following
#        disclaimer in the documentation and/or other materials provided with
#        the distribution.
#
#      * Neither the name of John Haddon nor the names of
#        any other contributors to this software may be used to endorse or
#        promote products derived from this software without specific prior
#        written permission.
#
#  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
#  IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
#  THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
#  PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
#  CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
#  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
#  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
#  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
#  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################

import Gaffer
import GafferScene

##########################################################################
# Metadata
##########################################################################

Gaffer.Metadata.registerNode(

	GafferScene.DeleteOptions,

	"description",
	"""
	A node which removes options from the globals.
	""",

	plugs = {

		"names" : [

			"description",
			"""
			The names of options to be removed. Names should be separated
			by spaces and can use Gaffer's standard wildcards.
			""",

		],

		"invertNames" : [

			"description",
			"""
			When on, matching names are kept, and non-matching names are removed.
			""",

		],

	}

)
lucienfostier/gaffer
python/GafferSceneUI/DeleteOptionsUI.py
Python
bsd-3-clause
2,461
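As a generic illustration only (this is not Gaffer's implementation), the space-separated wildcard matching described for the `names` plug behaves roughly like the following, with `invertNames` flipping which options survive:

import fnmatch

def keep_option(option_name, names, invert_names=False):
    # names is a space-separated list of patterns, e.g. "render:* user:projection"
    matched = any(fnmatch.fnmatchcase(option_name, pattern) for pattern in names.split())
    return matched if invert_names else not matched

# keep_option("render:camera", "render:*")                    -> False: matching options are removed
# keep_option("render:camera", "render:*", invert_names=True) -> True: matches are kept instead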
# -*- test-case-name: twisted.pb.test.test_promise -*-

from twisted.python import util, failure
from twisted.internet import defer

id = util.unsignedID

EVENTUAL, FULFILLED, BROKEN = range(3)

class Promise:
    """I am a promise of a future result.

    I am a lot like a Deferred, except that my promised result is usually an
    instance. I make it possible to schedule method invocations on this
    future instance, returning Promises for the results.

    Promises are always in one of three states: Eventual, Fulfilled, and
    Broken. (see http://www.erights.org/elib/concurrency/refmech.html for a
    pretty picture). They start as Eventual, meaning we do not yet know
    whether they will resolve or not. In this state, method invocations are
    queued. Eventually the Promise will be 'resolved' into either the
    Fulfilled or the Broken state. Fulfilled means that the promise contains
    a live object to which methods can be dispatched synchronously. Broken
    promises are incapable of invoking methods: they all result in Failure.

    Method invocation is always asynchronous: it always returns a Promise.
    """

    # all our internal methods are private, to avoid colliding with normal
    # method names that users may invoke on our eventual target.

    _state = EVENTUAL
    _resolution = None

    def __init__(self, d):
        self._watchers = []
        self._pendingMethods = []
        d.addCallbacks(self._ready, self._broken)

    def _wait_for_resolution(self):
        if self._state == EVENTUAL:
            d = defer.Deferred()
            self._watchers.append(d)
        else:
            d = defer.succeed(self._resolution)
        return d

    def _ready(self, resolution):
        self._resolution = resolution
        self._state = FULFILLED
        self._run_methods()

    def _broken(self, f):
        self._resolution = f
        self._state = BROKEN
        self._run_methods()

    def _invoke_method(self, name, args, kwargs):
        if isinstance(self._resolution, failure.Failure):
            return self._resolution
        method = getattr(self._resolution, name)
        res = method(*args, **kwargs)
        return res

    def _run_methods(self):
        for (name, args, kwargs, result_deferred) in self._pendingMethods:
            d = defer.maybeDeferred(self._invoke_method, name, args, kwargs)
            d.addBoth(result_deferred.callback)
        del self._pendingMethods
        for d in self._watchers:
            d.callback(self._resolution)
        del self._watchers

    def __repr__(self):
        return "<Promise %#x>" % id(self)

    def __getattr__(self, name):
        if name.startswith("__"):
            raise AttributeError
        def newmethod(*args, **kwargs):
            return self._add_method(name, args, kwargs)
        return newmethod

    def _add_method(self, name, args, kwargs):
        if self._state == EVENTUAL:
            d = defer.Deferred()
            self._pendingMethods.append((name, args, kwargs, d))
        else:
            d = defer.maybeDeferred(self._invoke_method, name, args, kwargs)
        return Promise(d)

def when(p):
    """Turn a Promise into a Deferred that will fire with the enclosed object
    when it is ready. Use this when you actually need to schedule something
    to happen in a synchronous fashion. Most of the time, you can just invoke
    methods on the Promise as if it were immediately available."""
    assert isinstance(p, Promise)
    return p._wait_for_resolution()
tquilian/exelearningTest
twisted/pb/promise.py
Python
gpl-2.0
3,532
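A small usage sketch of the class above, with the `Promise` class and `when()` in scope and an invented `Adder` target purely for illustration: method calls made while the promise is still Eventual are queued and each immediately returns another Promise, and `when` converts a Promise back into a Deferred.

from twisted.internet import defer

class Adder:
    def add(self, a, b):
        return a + b

def show(result):
    print(result)

d = defer.Deferred()
p = Promise(d)              # EVENTUAL: the target is not known yet, so calls are queued
sum_promise = p.add(2, 3)   # returns another Promise immediately
when(sum_promise).addCallback(show)
d.callback(Adder())         # fulfils the promise; the queued call runs and show() prints 5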
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # MicroPython documentation build configuration file, created by # sphinx-quickstart on Sun Sep 21 11:42:03 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. #master_doc = 'index' # General information about the project. project = 'MicroPython' copyright = '2014, Damien P. George' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.4' # The full version, including alpha/beta/rc tags. release = '1.4.5' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. 
#keep_warnings = False # -- Options for HTML output ---------------------------------------------- # on_rtd is whether we are on readthedocs.org on_rtd = os.environ.get('READTHEDOCS', None) == 'True' if not on_rtd: # only import and set the theme if we're building docs locally try: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), '.'] except: html_theme = 'default' html_theme_path = ['.'] else: html_theme_path = ['.'] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = ['.'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = '../../logo/trans-logo.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%d %b %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. html_additional_pages = {"index": "topindex.html"} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'MicroPythondoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. 
#'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'MicroPython.tex', 'MicroPython Documentation', 'Damien P. George', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'micropython', 'MicroPython Documentation', ['Damien P. George'], 1), ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'MicroPython', 'MicroPython Documentation', 'Damien P. George', 'MicroPython', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} # Work out the port to generate the docs for from collections import OrderedDict micropy_port = os.getenv('MICROPY_PORT') or 'pyboard' tags.add('port_' + micropy_port) ports = OrderedDict(( ("unix", "unix"), ("pyboard", "the pyboard"), ("wipy", "the WiPy"), ("esp8266", "esp8266"), )) # The members of the html_context dict are available inside topindex.html url_prefix = os.getenv('MICROPY_URL_PREFIX') or '/' html_context = { 'port':micropy_port, 'port_name':ports[micropy_port], 'all_ports':[(n, url_prefix + p) for p, n in ports.items()], } # Append the other ports' specific folders/files to the exclude pattern exclude_patterns.extend([port + '*' for port in ports if port != micropy_port]) # Specify a custom master document based on the port name master_doc = micropy_port + '_' + 'index'
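
# Illustrative build sketch: a minimal example of how the port selection above
# is typically driven, assuming a local sphinx-build invocation (the output
# directory and builder are assumptions, not defined by this file):
#
#     MICROPY_PORT=esp8266 sphinx-build -b html . build/esp8266/html
#
# With MICROPY_PORT=esp8266 this configuration adds the tag "port_esp8266",
# sets master_doc to "esp8266_index", and appends the other ports' document
# trees (unix*, pyboard*, wipy*) to exclude_patterns so they are skipped.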
vriera/micropython
docs/conf.py
Python
mit
9,675
""" Boolean geometry utilities. """ from __future__ import absolute_import #Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module. import __init__ import sys __author__ = 'Enrique Perez ([email protected])' __credits__ = 'Art of Illusion <http://www.artofillusion.org/>' __date__ = '$Date: 2008/02/05 $' __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html' def _getAccessibleAttribute(attributeName): 'Get the accessible attribute.' if attributeName in globalAccessibleAttributeDictionary: return globalAccessibleAttributeDictionary[attributeName] return None def continuous(valueString): 'Print continuous.' sys.stdout.write(str(valueString)) return valueString def line(valueString): 'Print line.' print(valueString) return valueString globalAccessibleAttributeDictionary = {'continuous' : continuous, 'line' : line}
dob71/x2swn
skeinforge/fabmetheus_utilities/geometry/geometry_utilities/evaluate_fundamentals/print.py
Python
gpl-3.0
984
from openerp.osv import osv, fields from openerp.tools.translate import _ from openerp.addons.point_of_sale.point_of_sale import pos_session class pos_session_opening(osv.osv_memory): _name = 'pos.session.opening' _columns = { 'pos_config_id' : fields.many2one('pos.config', string='Point of Sale', required=True), 'pos_session_id' : fields.many2one('pos.session', string='PoS Session'), 'pos_state' : fields.related('pos_session_id', 'state', type='selection', selection=pos_session.POS_SESSION_STATE, string='Session Status', readonly=True), 'pos_state_str' : fields.char('Status', readonly=True), 'show_config' : fields.boolean('Show Config', readonly=True), 'pos_session_name' : fields.related('pos_session_id', 'name', string="Session Name", type='char', size=64, readonly=True), 'pos_session_username' : fields.related('pos_session_id', 'user_id', 'name', type='char', size=64, readonly=True) } def open_ui(self, cr, uid, ids, context=None): data = self.browse(cr, uid, ids[0], context=context) context = dict(context or {}) context['active_id'] = data.pos_session_id.id return { 'type' : 'ir.actions.act_url', 'url': '/pos/web/', 'target': 'self', } def open_existing_session_cb_close(self, cr, uid, ids, context=None): wizard = self.browse(cr, uid, ids[0], context=context) wizard.pos_session_id.signal_workflow('cashbox_control') return self.open_session_cb(cr, uid, ids, context) def open_session_cb(self, cr, uid, ids, context=None): assert len(ids) == 1, "you can open only one session at a time" proxy = self.pool.get('pos.session') wizard = self.browse(cr, uid, ids[0], context=context) if not wizard.pos_session_id: values = { 'user_id' : uid, 'config_id' : wizard.pos_config_id.id, } session_id = proxy.create(cr, uid, values, context=context) s = proxy.browse(cr, uid, session_id, context=context) if s.state=='opened': return self.open_ui(cr, uid, ids, context=context) return self._open_session(session_id) return self._open_session(wizard.pos_session_id.id) def open_existing_session_cb(self, cr, uid, ids, context=None): assert len(ids) == 1 wizard = self.browse(cr, uid, ids[0], context=context) return self._open_session(wizard.pos_session_id.id) def _open_session(self, session_id): return { 'name': _('Session'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'pos.session', 'res_id': session_id, 'view_id': False, 'type': 'ir.actions.act_window', } def on_change_config(self, cr, uid, ids, config_id, context=None): result = { 'pos_session_id': False, 'pos_state': False, 'pos_state_str' : '', 'pos_session_username' : False, 'pos_session_name' : False, } if not config_id: return {'value' : result} proxy = self.pool.get('pos.session') session_ids = proxy.search(cr, uid, [ ('state', '!=', 'closed'), ('config_id', '=', config_id), ('user_id', '=', uid), ], context=context) if session_ids: session = proxy.browse(cr, uid, session_ids[0], context=context) result['pos_state'] = str(session.state) result['pos_state_str'] = dict(pos_session.POS_SESSION_STATE).get(session.state, '') result['pos_session_id'] = session.id result['pos_session_name'] = session.name result['pos_session_username'] = session.user_id.name return {'value' : result} def default_get(self, cr, uid, fieldnames, context=None): so = self.pool.get('pos.session') session_ids = so.search(cr, uid, [('state','<>','closed'), ('user_id','=',uid)], context=context) if session_ids: result = so.browse(cr, uid, session_ids[0], context=context).config_id.id else: current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context) result 
= current_user.pos_config and current_user.pos_config.id or False if not result: r = self.pool.get('pos.config').search(cr, uid, [], context=context) result = r and r[0] or False count = self.pool.get('pos.config').search_count(cr, uid, [('state', '=', 'active')], context=context) show_config = bool(count > 1) return { 'pos_config_id' : result, 'show_config' : show_config, }
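
# Summary sketch of the wizard flow implemented above (an interpretation of
# the code, not additional behaviour):
#   * default_get: preselect the config of the user's open session, else the
#     user's own pos_config, else the first pos.config found; show_config is
#     set when more than one active config exists.
#   * on_change_config: look up a not-yet-closed session for the chosen config
#     and user, and mirror its name, user and state into the wizard fields.
#   * open_session_cb: create a pos.session when none exists, then either jump
#     straight to the web UI (state 'opened') or open the session form view.
#   * open_existing_session_cb_close: send the 'cashbox_control' workflow
#     signal before continuing with open_session_cb.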
addition-it-solutions/project-all
addons/point_of_sale/wizard/pos_session_opening.py
Python
agpl-3.0
5,032
# Copyright 2014, Rackspace, US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This package holds the REST API that supports the Horizon dashboard Javascript code. It is not intended to be used outside of Horizon, and makes no promises of stability or fitness for purpose outside of that scope. It does not promise to adhere to the general OpenStack API Guidelines set out in https://wiki.openstack.org/wiki/APIChangeGuidelines. """ # import REST API modules here from . import cinder #flake8: noqa from . import config #flake8: noqa from . import glance #flake8: noqa from . import heat #flake8: noqa from . import keystone #flake8: noqa from . import network #flake8: noqa from . import neutron #flake8: noqa from . import nova #flake8: noqa from . import policy #flake8: noqa
takeshineshiro/horizon
openstack_dashboard/api/rest/__init__.py
Python
apache-2.0
1,343
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functions to provide simpler and prettier logging.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import logging_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import tensor_array_ops __all__ = ["print_op"] def _get_tensor_repr(t, print_tensor_name=True, print_tensor_type=True, print_shape=True, summarize_indicator_vector=True): """Return a list of Tensors that summarize the given tensor t.""" tensor_list = [] if print_tensor_name and isinstance(t, ops.Tensor): tensor_list.append(constant_op.constant("Name: " + t.name)) if print_tensor_type: if isinstance(t, ops.Tensor): t_type_str = "Type: Tensor ({})".format(t.dtype.name) elif isinstance(t, ops.SparseTensor): t_type_str = "Type: SparseTensor ({})".format(t.dtype.name) elif isinstance(t, tensor_array_ops.TensorArray): t_type_str = "Type: TensorArray ({})".format(t.dtype.name) tensor_list.append(constant_op.constant(t_type_str)) if print_shape: if isinstance(t, ops.SparseTensor): tensor_list.append(constant_op.constant("Shape:")) tensor_list.append(t.shape) elif isinstance(t, ops.Tensor): tensor_list.append(constant_op.constant("Shape: " + str(t.get_shape( ).dims))) elif isinstance(t, tensor_array_ops.TensorArray): tensor_list.append(constant_op.constant("Size:")) tensor_list.append(t.size()) if summarize_indicator_vector and t.dtype == dtypes.bool: int_tensor = math_ops.cast(t, dtypes.uint8) tensor_list.append(constant_op.constant("First True in Boolean tensor at:")) tensor_list.append(math_ops.argmax(int_tensor, 0)) if isinstance(t, ops.SparseTensor): tensor_list.append(constant_op.constant("Sparse indices:")) tensor_list.append(t.indices) tensor_list.append(constant_op.constant("Sparse values:")) tensor_list.append(t.values) elif isinstance(t, ops.Tensor): tensor_list.append(constant_op.constant("Value:")) tensor_list.append(t) elif isinstance(t, tensor_array_ops.TensorArray): tensor_list.append(constant_op.constant("Value:")) tensor_list.append(t.pack()) return tensor_list def print_op(input_, data=None, message=None, first_n=None, summarize=20, print_tensor_name=True, print_tensor_type=True, print_shape=True, summarize_indicator_vector=True, name=None): """Creates a print op that will print when a tensor is accessed. Wraps the tensor passed in so that whenever that tensor is accessed, the message `message` is printed, along with the current value of the tensor `t` and an optional list of other tensors. Args: input_: A Tensor/SparseTensor/TensorArray to print when it is evaluated. data: A list of other tensors to print. 
message: A string message to print as a prefix. first_n: Only log `first_n` number of times. Negative numbers log always; this is the default. summarize: Print this number of elements in the tensor. print_tensor_name: Print the tensor name. print_tensor_type: Print the tensor type. print_shape: Print the tensor's shape. summarize_indicator_vector: Whether to print the index of the first true value in an indicator vector (a Boolean tensor). name: The name to give this op. Returns: A Print op. The Print op returns `input_`. Raises: ValueError: If the tensor `input_` is not a Tensor, SparseTensor or TensorArray. """ message = message or "" if input_ is None: raise ValueError("input_ must be of type " "Tensor, SparseTensor or TensorArray") tensor_list = _get_tensor_repr(input_, print_tensor_name, print_tensor_type, print_shape, summarize_indicator_vector) if data is not None: for t in data: tensor_list.extend(_get_tensor_repr(t, print_tensor_name, print_tensor_type, print_shape, summarize_indicator_vector)) if isinstance(input_, ops.Tensor): input_ = logging_ops.Print(input_, tensor_list, message, first_n, summarize, name) elif isinstance(input_, ops.SparseTensor): p = logging_ops.Print( constant_op.constant([]), tensor_list, message, first_n, summarize, name) with ops.control_dependencies([p]): input_ = ops.SparseTensor(array_ops.identity(input_.indices), array_ops.identity(input_.values), array_ops.identity(input_.shape)) elif isinstance(input_, tensor_array_ops.TensorArray): p = logging_ops.Print( constant_op.constant([]), tensor_list, message, first_n, summarize, name) with ops.control_dependencies([p]): input_ = tensor_array_ops.TensorArray(dtype=input_.dtype, handle=input_.handle) else: raise ValueError("input_ must be of type " "Tensor, SparseTensor or TensorArray") return input_
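
# Illustrative usage sketch, assuming a small graph built with the same ops
# imported above (the tensors and message are placeholders for demonstration):
#
#     x = constant_op.constant([1.0, 2.0, 3.0])
#     y = print_op(x, data=[x], message="debug: ", first_n=10)
#     # Evaluating y prints the name/type/shape/value summary for x and the
#     # extra data tensors, and returns the same values as x.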
cg31/tensorflow
tensorflow/contrib/framework/python/ops/prettyprint_ops.py
Python
apache-2.0
6,195
#!/usr/bin/env python3 # Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the getchaintips RPC. - introduce a network split - work on chains of different lengths - join the network together again - verify that getchaintips now returns two chain tips. """ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class GetChainTipsTest (BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 def run_test(self): tips = self.nodes[0].getchaintips() assert_equal(len(tips), 1) assert_equal(tips[0]['branchlen'], 0) assert_equal(tips[0]['height'], 200) assert_equal(tips[0]['status'], 'active') # Split the network and build two chains of different lengths. self.split_network() self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) self.nodes[2].generatetoaddress(20, self.nodes[2].get_deterministic_priv_key().address) self.sync_all([self.nodes[:2], self.nodes[2:]]) tips = self.nodes[1].getchaintips () assert_equal (len (tips), 1) shortTip = tips[0] assert_equal (shortTip['branchlen'], 0) assert_equal (shortTip['height'], 210) assert_equal (tips[0]['status'], 'active') tips = self.nodes[3].getchaintips () assert_equal (len (tips), 1) longTip = tips[0] assert_equal (longTip['branchlen'], 0) assert_equal (longTip['height'], 220) assert_equal (tips[0]['status'], 'active') # Join the network halves and check that we now have two tips # (at least at the nodes that previously had the short chain). self.join_network () tips = self.nodes[0].getchaintips () assert_equal (len (tips), 2) assert_equal (tips[0], longTip) assert_equal (tips[1]['branchlen'], 10) assert_equal (tips[1]['status'], 'valid-fork') tips[1]['branchlen'] = 0 tips[1]['status'] = 'active' assert_equal (tips[1], shortTip) if __name__ == '__main__': GetChainTipsTest ().main ()
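
# Usage sketch: like other functional tests this file can be executed
# directly, or selected through the test runner; the runner path below is the
# conventional location and is given only as an example.
#
#     test/functional/rpc_getchaintips.py
#     test/functional/test_runner.py rpc_getchaintips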
litecoin-project/litecoin
test/functional/rpc_getchaintips.py
Python
mit
2,291
import unittest import time from datetime import datetime from app import create_app, db from app.models import User, AnonymousUser, Role, Permission class UserModelTestCase(unittest.TestCase): def setUp(self): self.app = create_app('testing') self.app_context = self.app.app_context() self.app_context.push() db.create_all() Role.insert_roles() def tearDown(self): db.session.remove() db.drop_all() self.app_context.pop() def test_password_setter(self): u = User(password='cat') self.assertTrue(u.password_hash is not None) def test_no_password_getter(self): u = User(password='cat') with self.assertRaises(AttributeError): u.password def test_password_verification(self): u = User(password='cat') self.assertTrue(u.verify_password('cat')) self.assertFalse(u.verify_password('dog')) def test_password_salts_are_random(self): u = User(password='cat') u2 = User(password='cat') self.assertTrue(u.password_hash != u2.password_hash) def test_valid_confirmation_token(self): u = User(password='cat') db.session.add(u) db.session.commit() token = u.generate_confirmation_token() self.assertTrue(u.confirm(token)) def test_invalid_confirmation_token(self): u1 = User(password='cat') u2 = User(password='dog') db.session.add(u1) db.session.add(u2) db.session.commit() token = u1.generate_confirmation_token() self.assertFalse(u2.confirm(token)) def test_expired_confirmation_token(self): u = User(password='cat') db.session.add(u) db.session.commit() token = u.generate_confirmation_token(1) time.sleep(2) self.assertFalse(u.confirm(token)) def test_valid_reset_token(self): u = User(password='cat') db.session.add(u) db.session.commit() token = u.generate_reset_token() self.assertTrue(u.reset_password(token, 'dog')) self.assertTrue(u.verify_password('dog')) def test_invalid_reset_token(self): u1 = User(password='cat') u2 = User(password='dog') db.session.add(u1) db.session.add(u2) db.session.commit() token = u1.generate_reset_token() self.assertFalse(u2.reset_password(token, 'horse')) self.assertTrue(u2.verify_password('dog')) def test_valid_email_change_token(self): u = User(email='[email protected]', password='cat') db.session.add(u) db.session.commit() token = u.generate_email_change_token('[email protected]') self.assertTrue(u.change_email(token)) self.assertTrue(u.email == '[email protected]') def test_invalid_email_change_token(self): u1 = User(email='[email protected]', password='cat') u2 = User(email='[email protected]', password='dog') db.session.add(u1) db.session.add(u2) db.session.commit() token = u1.generate_email_change_token('[email protected]') self.assertFalse(u2.change_email(token)) self.assertTrue(u2.email == '[email protected]') def test_duplicate_email_change_token(self): u1 = User(email='[email protected]', password='cat') u2 = User(email='[email protected]', password='dog') db.session.add(u1) db.session.add(u2) db.session.commit() token = u2.generate_email_change_token('[email protected]') self.assertFalse(u2.change_email(token)) self.assertTrue(u2.email == '[email protected]') def test_roles_and_permissions(self): u = User(email='[email protected]', password='cat') self.assertTrue(u.can(Permission.WRITE_ARTICLES)) self.assertFalse(u.can(Permission.MODERATE_COMMENTS)) def test_anonymous_user(self): u = AnonymousUser() self.assertFalse(u.can(Permission.FOLLOW)) def test_timestamps(self): u = User(password='cat') db.session.add(u) db.session.commit() self.assertTrue( (datetime.utcnow() - u.member_since).total_seconds() < 3) self.assertTrue( (datetime.utcnow() - u.last_seen).total_seconds() < 3) def 
test_ping(self): u = User(password='cat') db.session.add(u) db.session.commit() time.sleep(2) last_seen_before = u.last_seen u.ping() self.assertTrue(u.last_seen > last_seen_before) def test_gravatar(self): u = User(email='[email protected]', password='cat') with self.app.test_request_context('/'): gravatar = u.gravatar() gravatar_256 = u.gravatar(size=256) gravatar_pg = u.gravatar(rating='pg') gravatar_retro = u.gravatar(default='retro') with self.app.test_request_context('/', base_url='https://example.com'): gravatar_ssl = u.gravatar() self.assertTrue('http://www.gravatar.com/avatar/' + 'd4c74594d841139328695756648b6bd6'in gravatar) self.assertTrue('s=256' in gravatar_256) self.assertTrue('r=pg' in gravatar_pg) self.assertTrue('d=retro' in gravatar_retro) self.assertTrue('https://secure.gravatar.com/avatar/' + 'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
Ginray/my-flask-blog
tests/test_user_model.py
Python
mit
5,437
from ..libmp.backend import xrange from .functions import defun, defun_wrapped, defun_static @defun def stieltjes(ctx, n, a=1): n = ctx.convert(n) a = ctx.convert(a) if n < 0: return ctx.bad_domain("Stieltjes constants defined for n >= 0") if hasattr(ctx, "stieltjes_cache"): stieltjes_cache = ctx.stieltjes_cache else: stieltjes_cache = ctx.stieltjes_cache = {} if a == 1: if n == 0: return +ctx.euler if n in stieltjes_cache: prec, s = stieltjes_cache[n] if prec >= ctx.prec: return +s mag = 1 def f(x): xa = x/a v = (xa-ctx.j)*ctx.ln(a-ctx.j*x)**n/(1+xa**2)/(ctx.exp(2*ctx.pi*x)-1) return ctx._re(v) / mag orig = ctx.prec try: # Normalize integrand by approx. magnitude to # speed up quadrature (which uses absolute error) if n > 50: ctx.prec = 20 mag = ctx.quad(f, [0,ctx.inf], maxdegree=3) ctx.prec = orig + 10 + int(n**0.5) s = ctx.quad(f, [0,ctx.inf], maxdegree=20) v = ctx.ln(a)**n/(2*a) - ctx.ln(a)**(n+1)/(n+1) + 2*s/a*mag finally: ctx.prec = orig if a == 1 and ctx.isint(n): stieltjes_cache[n] = (ctx.prec, v) return +v @defun_wrapped def siegeltheta(ctx, t, derivative=0): d = int(derivative) if (t == ctx.inf or t == ctx.ninf): if d < 2: if t == ctx.ninf and d == 0: return ctx.ninf return ctx.inf else: return ctx.zero if d == 0: if ctx._im(t): # XXX: cancellation occurs a = ctx.loggamma(0.25+0.5j*t) b = ctx.loggamma(0.25-0.5j*t) return -ctx.ln(ctx.pi)/2*t - 0.5j*(a-b) else: if ctx.isinf(t): return t return ctx._im(ctx.loggamma(0.25+0.5j*t)) - ctx.ln(ctx.pi)/2*t if d > 0: a = (-0.5j)**(d-1)*ctx.polygamma(d-1, 0.25-0.5j*t) b = (0.5j)**(d-1)*ctx.polygamma(d-1, 0.25+0.5j*t) if ctx._im(t): if d == 1: return -0.5*ctx.log(ctx.pi)+0.25*(a+b) else: return 0.25*(a+b) else: if d == 1: return ctx._re(-0.5*ctx.log(ctx.pi)+0.25*(a+b)) else: return ctx._re(0.25*(a+b)) @defun_wrapped def grampoint(ctx, n): # asymptotic expansion, from # http://mathworld.wolfram.com/GramPoint.html g = 2*ctx.pi*ctx.exp(1+ctx.lambertw((8*n+1)/(8*ctx.e))) return ctx.findroot(lambda t: ctx.siegeltheta(t)-ctx.pi*n, g) @defun_wrapped def siegelz(ctx, t, **kwargs): d = int(kwargs.get("derivative", 0)) t = ctx.convert(t) t1 = ctx._re(t) t2 = ctx._im(t) prec = ctx.prec try: if abs(t1) > 500*prec and t2**2 < t1: v = ctx.rs_z(t, d) if ctx._is_real_type(t): return ctx._re(v) return v except NotImplementedError: pass ctx.prec += 21 e1 = ctx.expj(ctx.siegeltheta(t)) z = ctx.zeta(0.5+ctx.j*t) if d == 0: v = e1*z ctx.prec=prec if ctx._is_real_type(t): return ctx._re(v) return +v z1 = ctx.zeta(0.5+ctx.j*t, derivative=1) theta1 = ctx.siegeltheta(t, derivative=1) if d == 1: v = ctx.j*e1*(z1+z*theta1) ctx.prec=prec if ctx._is_real_type(t): return ctx._re(v) return +v z2 = ctx.zeta(0.5+ctx.j*t, derivative=2) theta2 = ctx.siegeltheta(t, derivative=2) comb1 = theta1**2-ctx.j*theta2 if d == 2: def terms(): return [2*z1*theta1, z2, z*comb1] v = ctx.sum_accurately(terms, 1) v = -e1*v ctx.prec = prec if ctx._is_real_type(t): return ctx._re(v) return +v ctx.prec += 10 z3 = ctx.zeta(0.5+ctx.j*t, derivative=3) theta3 = ctx.siegeltheta(t, derivative=3) comb2 = theta1**3-3*ctx.j*theta1*theta2-theta3 if d == 3: def terms(): return [3*theta1*z2, 3*z1*comb1, z3+z*comb2] v = ctx.sum_accurately(terms, 1) v = -ctx.j*e1*v ctx.prec = prec if ctx._is_real_type(t): return ctx._re(v) return +v z4 = ctx.zeta(0.5+ctx.j*t, derivative=4) theta4 = ctx.siegeltheta(t, derivative=4) def terms(): return [theta1**4, -6*ctx.j*theta1**2*theta2, -3*theta2**2, -4*theta1*theta3, ctx.j*theta4] comb3 = ctx.sum_accurately(terms, 1) if d == 4: def terms(): return [6*theta1**2*z2, 
-6*ctx.j*z2*theta2, 4*theta1*z3, 4*z1*comb2, z4, z*comb3] v = ctx.sum_accurately(terms, 1) v = e1*v ctx.prec = prec if ctx._is_real_type(t): return ctx._re(v) return +v if d > 4: h = lambda x: ctx.siegelz(x, derivative=4) return ctx.diff(h, t, n=d-4) _zeta_zeros = [ 14.134725142,21.022039639,25.010857580,30.424876126,32.935061588, 37.586178159,40.918719012,43.327073281,48.005150881,49.773832478, 52.970321478,56.446247697,59.347044003,60.831778525,65.112544048, 67.079810529,69.546401711,72.067157674,75.704690699,77.144840069, 79.337375020,82.910380854,84.735492981,87.425274613,88.809111208, 92.491899271,94.651344041,95.870634228,98.831194218,101.317851006, 103.725538040,105.446623052,107.168611184,111.029535543,111.874659177, 114.320220915,116.226680321,118.790782866,121.370125002,122.946829294, 124.256818554,127.516683880,129.578704200,131.087688531,133.497737203, 134.756509753,138.116042055,139.736208952,141.123707404,143.111845808, 146.000982487,147.422765343,150.053520421,150.925257612,153.024693811, 156.112909294,157.597591818,158.849988171,161.188964138,163.030709687, 165.537069188,167.184439978,169.094515416,169.911976479,173.411536520, 174.754191523,176.441434298,178.377407776,179.916484020,182.207078484, 184.874467848,185.598783678,187.228922584,189.416158656,192.026656361, 193.079726604,195.265396680,196.876481841,198.015309676,201.264751944, 202.493594514,204.189671803,205.394697202,207.906258888,209.576509717, 211.690862595,213.347919360,214.547044783,216.169538508,219.067596349, 220.714918839,221.430705555,224.007000255,224.983324670,227.421444280, 229.337413306,231.250188700,231.987235253,233.693404179,236.524229666, ] def _load_zeta_zeros(url): import urllib d = urllib.urlopen(url) L = [float(x) for x in d.readlines()] # Sanity check assert round(L[0]) == 14 _zeta_zeros[:] = L @defun def oldzetazero(ctx, n, url='http://www.dtc.umn.edu/~odlyzko/zeta_tables/zeros1'): n = int(n) if n < 0: return ctx.zetazero(-n).conjugate() if n == 0: raise ValueError("n must be nonzero") if n > len(_zeta_zeros) and n <= 100000: _load_zeta_zeros(url) if n > len(_zeta_zeros): raise NotImplementedError("n too large for zetazeros") return ctx.mpc(0.5, ctx.findroot(ctx.siegelz, _zeta_zeros[n-1])) @defun_wrapped def riemannr(ctx, x): if x == 0: return ctx.zero # Check if a simple asymptotic estimate is accurate enough if abs(x) > 1000: a = ctx.li(x) b = 0.5*ctx.li(ctx.sqrt(x)) if abs(b) < abs(a)*ctx.eps: return a if abs(x) < 0.01: # XXX ctx.prec += int(-ctx.log(abs(x),2)) # Sum Gram's series s = t = ctx.one u = ctx.ln(x) k = 1 while abs(t) > abs(s)*ctx.eps: t = t * u / k s += t / (k * ctx._zeta_int(k+1)) k += 1 return s @defun_static def primepi(ctx, x): x = int(x) if x < 2: return 0 return len(ctx.list_primes(x)) # TODO: fix the interface wrt contexts @defun_wrapped def primepi2(ctx, x): x = int(x) if x < 2: return ctx._iv.zero if x < 2657: return ctx._iv.mpf(ctx.primepi(x)) mid = ctx.li(x) # Schoenfeld's estimate for x >= 2657, assuming RH err = ctx.sqrt(x,rounding='u')*ctx.ln(x,rounding='u')/8/ctx.pi(rounding='d') a = ctx.floor((ctx._iv.mpf(mid)-err).a, rounding='d') b = ctx.ceil((ctx._iv.mpf(mid)+err).b, rounding='u') return ctx._iv.mpf([a,b]) @defun_wrapped def primezeta(ctx, s): if ctx.isnan(s): return s if ctx.re(s) <= 0: raise ValueError("prime zeta function defined only for re(s) > 0") if s == 1: return ctx.inf if s == 0.5: return ctx.mpc(ctx.ninf, ctx.pi) r = ctx.re(s) if r > ctx.prec: return 0.5**s else: wp = ctx.prec + int(r) def terms(): orig = ctx.prec # zeta ~ 1+eps; need to set 
precision # to get logarithm accurately k = 0 while 1: k += 1 u = ctx.moebius(k) if not u: continue ctx.prec = wp t = u*ctx.ln(ctx.zeta(k*s))/k if not t: return #print ctx.prec, ctx.nstr(t) ctx.prec = orig yield t return ctx.sum_accurately(terms) # TODO: for bernpoly and eulerpoly, ensure that all exact zeros are covered @defun_wrapped def bernpoly(ctx, n, z): # Slow implementation: #return sum(ctx.binomial(n,k)*ctx.bernoulli(k)*z**(n-k) for k in xrange(0,n+1)) n = int(n) if n < 0: raise ValueError("Bernoulli polynomials only defined for n >= 0") if z == 0 or (z == 1 and n > 1): return ctx.bernoulli(n) if z == 0.5: return (ctx.ldexp(1,1-n)-1)*ctx.bernoulli(n) if n <= 3: if n == 0: return z ** 0 if n == 1: return z - 0.5 if n == 2: return (6*z*(z-1)+1)/6 if n == 3: return z*(z*(z-1.5)+0.5) if ctx.isinf(z): return z ** n if ctx.isnan(z): return z if abs(z) > 2: def terms(): t = ctx.one yield t r = ctx.one/z k = 1 while k <= n: t = t*(n+1-k)/k*r if not (k > 2 and k & 1): yield t*ctx.bernoulli(k) k += 1 return ctx.sum_accurately(terms) * z**n else: def terms(): yield ctx.bernoulli(n) t = ctx.one k = 1 while k <= n: t = t*(n+1-k)/k * z m = n-k if not (m > 2 and m & 1): yield t*ctx.bernoulli(m) k += 1 return ctx.sum_accurately(terms) @defun_wrapped def eulerpoly(ctx, n, z): n = int(n) if n < 0: raise ValueError("Euler polynomials only defined for n >= 0") if n <= 2: if n == 0: return z ** 0 if n == 1: return z - 0.5 if n == 2: return z*(z-1) if ctx.isinf(z): return z**n if ctx.isnan(z): return z m = n+1 if z == 0: return -2*(ctx.ldexp(1,m)-1)*ctx.bernoulli(m)/m * z**0 if z == 1: return 2*(ctx.ldexp(1,m)-1)*ctx.bernoulli(m)/m * z**0 if z == 0.5: if n % 2: return ctx.zero # Use exact code for Euler numbers if n < 100 or n*ctx.mag(0.46839865*n) < ctx.prec*0.25: return ctx.ldexp(ctx._eulernum(n), -n) # http://functions.wolfram.com/Polynomials/EulerE2/06/01/02/01/0002/ def terms(): t = ctx.one k = 0 w = ctx.ldexp(1,n+2) while 1: v = n-k+1 if not (v > 2 and v & 1): yield (2-w)*ctx.bernoulli(v)*t k += 1 if k > n: break t = t*z*(n-k+2)/k w *= 0.5 return ctx.sum_accurately(terms) / m @defun def eulernum(ctx, n, exact=False): n = int(n) if exact: return int(ctx._eulernum(n)) if n < 100: return ctx.mpf(ctx._eulernum(n)) if n % 2: return ctx.zero return ctx.ldexp(ctx.eulerpoly(n,0.5), n) # TODO: this should be implemented low-level def polylog_series(ctx, s, z): tol = +ctx.eps l = ctx.zero k = 1 zk = z while 1: term = zk / k**s l += term if abs(term) < tol: break zk *= z k += 1 return l def polylog_continuation(ctx, n, z): if n < 0: return z*0 twopij = 2j * ctx.pi a = -twopij**n/ctx.fac(n) * ctx.bernpoly(n, ctx.ln(z)/twopij) if ctx._is_real_type(z) and z < 0: a = ctx._re(a) if ctx._im(z) < 0 or (ctx._im(z) == 0 and ctx._re(z) >= 1): a -= twopij*ctx.ln(z)**(n-1)/ctx.fac(n-1) return a def polylog_unitcircle(ctx, n, z): tol = +ctx.eps if n > 1: l = ctx.zero logz = ctx.ln(z) logmz = ctx.one m = 0 while 1: if (n-m) != 1: term = ctx.zeta(n-m) * logmz / ctx.fac(m) if term and abs(term) < tol: break l += term logmz *= logz m += 1 l += ctx.ln(z)**(n-1)/ctx.fac(n-1)*(ctx.harmonic(n-1)-ctx.ln(-ctx.ln(z))) elif n < 1: # else l = ctx.fac(-n)*(-ctx.ln(z))**(n-1) logz = ctx.ln(z) logkz = ctx.one k = 0 while 1: b = ctx.bernoulli(k-n+1) if b: term = b*logkz/(ctx.fac(k)*(k-n+1)) if abs(term) < tol: break l -= term logkz *= logz k += 1 else: raise ValueError if ctx._is_real_type(z) and z < 0: l = ctx._re(l) return l def polylog_general(ctx, s, z): v = ctx.zero u = ctx.ln(z) if not abs(u) < 5: # theoretically |u| < 2*pi raise 
NotImplementedError("polylog for arbitrary s and z") t = 1 k = 0 while 1: term = ctx.zeta(s-k) * t if abs(term) < ctx.eps: break v += term k += 1 t *= u t /= k return ctx.gamma(1-s)*(-u)**(s-1) + v @defun_wrapped def polylog(ctx, s, z): s = ctx.convert(s) z = ctx.convert(z) if z == 1: return ctx.zeta(s) if z == -1: return -ctx.altzeta(s) if s == 0: return z/(1-z) if s == 1: return -ctx.ln(1-z) if s == -1: return z/(1-z)**2 if abs(z) <= 0.75 or (not ctx.isint(s) and abs(z) < 0.9): return polylog_series(ctx, s, z) if abs(z) >= 1.4 and ctx.isint(s): return (-1)**(s+1)*polylog_series(ctx, s, 1/z) + polylog_continuation(ctx, s, z) if ctx.isint(s): return polylog_unitcircle(ctx, int(s), z) return polylog_general(ctx, s, z) #raise NotImplementedError("polylog for arbitrary s and z") # This could perhaps be used in some cases #from quadrature import quad #return quad(lambda t: t**(s-1)/(exp(t)/z-1),[0,inf])/gamma(s) @defun_wrapped def clsin(ctx, s, z, pi=False): if ctx.isint(s) and s < 0 and int(s) % 2 == 1: return z*0 if pi: a = ctx.expjpi(z) else: a = ctx.expj(z) if ctx._is_real_type(z) and ctx._is_real_type(s): return ctx.im(ctx.polylog(s,a)) b = 1/a return (-0.5j)*(ctx.polylog(s,a) - ctx.polylog(s,b)) @defun_wrapped def clcos(ctx, s, z, pi=False): if ctx.isint(s) and s < 0 and int(s) % 2 == 0: return z*0 if pi: a = ctx.expjpi(z) else: a = ctx.expj(z) if ctx._is_real_type(z) and ctx._is_real_type(s): return ctx.re(ctx.polylog(s,a)) b = 1/a return 0.5*(ctx.polylog(s,a) + ctx.polylog(s,b)) @defun def altzeta(ctx, s, **kwargs): try: return ctx._altzeta(s, **kwargs) except NotImplementedError: return ctx._altzeta_generic(s) @defun_wrapped def _altzeta_generic(ctx, s): if s == 1: return ctx.ln2 + 0*s return -ctx.powm1(2, 1-s) * ctx.zeta(s) @defun def zeta(ctx, s, a=1, derivative=0, method=None, **kwargs): d = int(derivative) if a == 1 and not (d or method): try: return ctx._zeta(s, **kwargs) except NotImplementedError: pass s = ctx.convert(s) prec = ctx.prec method = kwargs.get('method') verbose = kwargs.get('verbose') if a == 1 and method != 'euler-maclaurin': im = abs(ctx._im(s)) re = abs(ctx._re(s)) #if (im < prec or method == 'borwein') and not derivative: # try: # if verbose: # print "zeta: Attempting to use the Borwein algorithm" # return ctx._zeta(s, **kwargs) # except NotImplementedError: # if verbose: # print "zeta: Could not use the Borwein algorithm" # pass if abs(im) > 500*prec and 10*re < prec and derivative <= 4 or \ method == 'riemann-siegel': try: # py2.4 compatible try block try: if verbose: print("zeta: Attempting to use the Riemann-Siegel algorithm") return ctx.rs_zeta(s, derivative, **kwargs) except NotImplementedError: if verbose: print("zeta: Could not use the Riemann-Siegel algorithm") pass finally: ctx.prec = prec if s == 1: return ctx.inf abss = abs(s) if abss == ctx.inf: if ctx.re(s) == ctx.inf: if d == 0: return ctx.one return ctx.zero return s*0 elif ctx.isnan(abss): return 1/s if ctx.re(s) > 2*ctx.prec and a == 1 and not derivative: return ctx.one + ctx.power(2, -s) return +ctx._hurwitz(s, a, d, **kwargs) @defun def _hurwitz(ctx, s, a=1, d=0, **kwargs): prec = ctx.prec verbose = kwargs.get('verbose') try: extraprec = 10 ctx.prec += extraprec # We strongly want to special-case rational a a, atype = ctx._convert_param(a) if ctx.re(s) < 0: if verbose: print("zeta: Attempting reflection formula") try: return _hurwitz_reflection(ctx, s, a, d, atype) except NotImplementedError: pass if verbose: print("zeta: Reflection formula failed") if verbose: print("zeta: Using the 
Euler-Maclaurin algorithm") while 1: ctx.prec = prec + extraprec T1, T2 = _hurwitz_em(ctx, s, a, d, prec+10, verbose) cancellation = ctx.mag(T1) - ctx.mag(T1+T2) if verbose: print("Term 1:", T1) print("Term 2:", T2) print("Cancellation:", cancellation, "bits") if cancellation < extraprec: return T1 + T2 else: extraprec = max(2*extraprec, min(cancellation + 5, 100*prec)) if extraprec > kwargs.get('maxprec', 100*prec): raise ctx.NoConvergence("zeta: too much cancellation") finally: ctx.prec = prec def _hurwitz_reflection(ctx, s, a, d, atype): # TODO: implement for derivatives if d != 0: raise NotImplementedError res = ctx.re(s) negs = -s # Integer reflection formula if ctx.isnpint(s): n = int(res) if n <= 0: return ctx.bernpoly(1-n, a) / (n-1) t = 1-s # We now require a to be standardized v = 0 shift = 0 b = a while ctx.re(b) > 1: b -= 1 v -= b**negs shift -= 1 while ctx.re(b) <= 0: v += b**negs b += 1 shift += 1 # Rational reflection formula if atype == 'Q' or atype == 'Z': try: p, q = a._mpq_ except: assert a == int(a) p = int(a) q = 1 p += shift*q assert 1 <= p <= q g = ctx.fsum(ctx.cospi(t/2-2*k*b)*ctx._hurwitz(t,(k,q)) \ for k in range(1,q+1)) g *= 2*ctx.gamma(t)/(2*ctx.pi*q)**t v += g return v # General reflection formula # Note: clcos/clsin can raise NotImplementedError else: C1, C2 = ctx.cospi_sinpi(0.5*t) # Clausen functions; could maybe use polylog directly if C1: C1 *= ctx.clcos(t, 2*a, pi=True) if C2: C2 *= ctx.clsin(t, 2*a, pi=True) v += 2*ctx.gamma(t)/(2*ctx.pi)**t*(C1+C2) return v def _hurwitz_em(ctx, s, a, d, prec, verbose): # May not be converted at this point a = ctx.convert(a) tol = -prec # Estimate number of terms for Euler-Maclaurin summation; could be improved M1 = 0 M2 = prec // 3 N = M2 lsum = 0 # This speeds up the recurrence for derivatives if ctx.isint(s): s = int(ctx._re(s)) s1 = s-1 while 1: # Truncated L-series l = ctx._zetasum(s, M1+a, M2-M1-1, [d])[0][0] #if d: # l = ctx.fsum((-ctx.ln(n+a))**d * (n+a)**negs for n in range(M1,M2)) #else: # l = ctx.fsum((n+a)**negs for n in range(M1,M2)) lsum += l M2a = M2+a logM2a = ctx.ln(M2a) logM2ad = logM2a**d logs = [logM2ad] logr = 1/logM2a rM2a = 1/M2a M2as = rM2a**s if d: tailsum = ctx.gammainc(d+1, s1*logM2a) / s1**(d+1) else: tailsum = 1/((s1)*(M2a)**s1) tailsum += 0.5 * logM2ad * M2as U = [1] r = M2as fact = 2 for j in range(1, N+1): # TODO: the following could perhaps be tidied a bit j2 = 2*j if j == 1: upds = [1] else: upds = [j2-2, j2-1] for m in upds: D = min(m,d+1) if m <= d: logs.append(logs[-1] * logr) Un = [0]*(D+1) for i in xrange(D): Un[i] = (1-m-s)*U[i] for i in xrange(1,D+1): Un[i] += (d-(i-1))*U[i-1] U = Un r *= rM2a t = ctx.fdot(U, logs) * r * ctx.bernoulli(j2)/(-fact) tailsum += t if ctx.mag(t) < tol: return lsum, (-1)**d * tailsum fact *= (j2+1)*(j2+2) if verbose: print("Sum range:", M1, M2, "term magnitude", ctx.mag(t), "tolerance", tol) M1, M2 = M2, M2*2 if ctx.re(s) < 0: N += N//2 @defun def _zetasum(ctx, s, a, n, derivatives=[0], reflect=False): """ Returns [xd0,xd1,...,xdr], [yd0,yd1,...ydr] where xdk = D^k ( 1/a^s + 1/(a+1)^s + ... + 1/(a+n)^s ) ydk = D^k conj( 1/a^(1-s) + 1/(a+1)^(1-s) + ... + 1/(a+n)^(1-s) ) D^k = kth derivative with respect to s, k ranges over the given list of derivatives (which should consist of either a single element or a range 0,1,...r). If reflect=False, the ydks are not computed. 
""" #print "zetasum", s, a, n try: return ctx._zetasum_fast(s, a, n, derivatives, reflect) except NotImplementedError: pass negs = ctx.fneg(s, exact=True) have_derivatives = derivatives != [0] have_one_derivative = len(derivatives) == 1 if not reflect: if not have_derivatives: return [ctx.fsum((a+k)**negs for k in xrange(n+1))], [] if have_one_derivative: d = derivatives[0] x = ctx.fsum(ctx.ln(a+k)**d * (a+k)**negs for k in xrange(n+1)) return [(-1)**d * x], [] maxd = max(derivatives) if not have_one_derivative: derivatives = range(maxd+1) xs = [ctx.zero for d in derivatives] if reflect: ys = [ctx.zero for d in derivatives] else: ys = [] for k in xrange(n+1): w = a + k xterm = w ** negs if reflect: yterm = ctx.conj(ctx.one / (w * xterm)) if have_derivatives: logw = -ctx.ln(w) if have_one_derivative: logw = logw ** maxd xs[0] += xterm * logw if reflect: ys[0] += yterm * logw else: t = ctx.one for d in derivatives: xs[d] += xterm * t if reflect: ys[d] += yterm * t t *= logw else: xs[0] += xterm if reflect: ys[0] += yterm return xs, ys @defun def dirichlet(ctx, s, chi=[1], derivative=0): s = ctx.convert(s) q = len(chi) d = int(derivative) if d > 2: raise NotImplementedError("arbitrary order derivatives") prec = ctx.prec try: ctx.prec += 10 if s == 1: have_pole = True for x in chi: if x and x != 1: have_pole = False h = +ctx.eps ctx.prec *= 2*(d+1) s += h if have_pole: return +ctx.inf z = ctx.zero for p in range(1,q+1): if chi[p%q]: if d == 1: z += chi[p%q] * (ctx.zeta(s, (p,q), 1) - \ ctx.zeta(s, (p,q))*ctx.log(q)) else: z += chi[p%q] * ctx.zeta(s, (p,q)) z /= q**s finally: ctx.prec = prec return +z def secondzeta_main_term(ctx, s, a, **kwargs): tol = ctx.eps f = lambda n: ctx.gammainc(0.5*s, a*gamm**2, regularized=True)*gamm**(-s) totsum = term = ctx.zero mg = ctx.inf n = 0 while mg > tol: totsum += term n += 1 gamm = ctx.im(ctx.zetazero_memoized(n)) term = f(n) mg = abs(term) err = 0 if kwargs.get("error"): sg = ctx.re(s) err = 0.5*ctx.pi**(-1)*max(1,sg)*a**(sg-0.5)*ctx.log(gamm/(2*ctx.pi))*\ ctx.gammainc(-0.5, a*gamm**2)/abs(ctx.gamma(s/2)) err = abs(err) return +totsum, err, n def secondzeta_prime_term(ctx, s, a, **kwargs): tol = ctx.eps f = lambda n: ctx.gammainc(0.5*(1-s),0.25*ctx.log(n)**2 * a**(-1))*\ ((0.5*ctx.log(n))**(s-1))*ctx.mangoldt(n)/ctx.sqrt(n)/\ (2*ctx.gamma(0.5*s)*ctx.sqrt(ctx.pi)) totsum = term = ctx.zero mg = ctx.inf n = 1 while mg > tol or n < 9: totsum += term n += 1 term = f(n) if term == 0: mg = ctx.inf else: mg = abs(term) if kwargs.get("error"): err = mg return +totsum, err, n def secondzeta_exp_term(ctx, s, a): if ctx.isint(s) and ctx.re(s) <= 0: m = int(round(ctx.re(s))) if not m & 1: return ctx.mpf('-0.25')**(-m//2) tol = ctx.eps f = lambda n: (0.25*a)**n/((n+0.5*s)*ctx.fac(n)) totsum = ctx.zero term = f(0) mg = ctx.inf n = 0 while mg > tol: totsum += term n += 1 term = f(n) mg = abs(term) v = a**(0.5*s)*totsum/ctx.gamma(0.5*s) return v def secondzeta_singular_term(ctx, s, a, **kwargs): factor = a**(0.5*(s-1))/(4*ctx.sqrt(ctx.pi)*ctx.gamma(0.5*s)) extraprec = ctx.mag(factor) ctx.prec += extraprec factor = a**(0.5*(s-1))/(4*ctx.sqrt(ctx.pi)*ctx.gamma(0.5*s)) tol = ctx.eps f = lambda n: ctx.bernpoly(n,0.75)*(4*ctx.sqrt(a))**n*\ ctx.gamma(0.5*n)/((s+n-1)*ctx.fac(n)) totsum = ctx.zero mg1 = ctx.inf n = 1 term = f(n) mg2 = abs(term) while mg2 > tol and mg2 <= mg1: totsum += term n += 1 term = f(n) totsum += term n +=1 term = f(n) mg1 = mg2 mg2 = abs(term) totsum += term pole = -2*(s-1)**(-2)+(ctx.euler+ctx.log(16*ctx.pi**2*a))*(s-1)**(-1) st = factor*(pole+totsum) 
err = 0 if kwargs.get("error"): if not ((mg2 > tol) and (mg2 <= mg1)): if mg2 <= tol: err = ctx.mpf(10)**int(ctx.log(abs(factor*tol),10)) if mg2 > mg1: err = ctx.mpf(10)**int(ctx.log(abs(factor*mg1),10)) err = max(err, ctx.eps*1.) ctx.prec -= extraprec return +st, err @defun def secondzeta(ctx, s, a = 0.015, **kwargs): r""" Evaluates the secondary zeta function `Z(s)`, defined for `\mathrm{Re}(s)>1` by .. math :: Z(s) = \sum_{n=1}^{\infty} \frac{1}{\tau_n^s} where `\frac12+i\tau_n` runs through the zeros of `\zeta(s)` with imaginary part positive. `Z(s)` extends to a meromorphic function on `\mathbb{C}` with a double pole at `s=1` and simple poles at the points `-2n` for `n=0`, 1, 2, ... **Examples** >>> from mpmath import * >>> mp.pretty = True; mp.dps = 15 >>> secondzeta(2) 0.023104993115419 >>> xi = lambda s: 0.5*s*(s-1)*pi**(-0.5*s)*gamma(0.5*s)*zeta(s) >>> Xi = lambda t: xi(0.5+t*j) >>> -0.5*diff(Xi,0,n=2)/Xi(0) (0.023104993115419 + 0.0j) We may ask for an approximate error value:: >>> secondzeta(0.5+100j, error=True) ((-0.216272011276718 - 0.844952708937228j), 2.22044604925031e-16) The function has poles at the negative odd integers, and dyadic rational values at the negative even integers:: >>> mp.dps = 30 >>> secondzeta(-8) -0.67236328125 >>> secondzeta(-7) +inf **Implementation notes** The function is computed as sum of four terms `Z(s)=A(s)-P(s)+E(s)-S(s)` respectively main, prime, exponential and singular terms. The main term `A(s)` is computed from the zeros of zeta. The prime term depends on the von Mangoldt function. The singular term is responsible for the poles of the function. The four terms depends on a small parameter `a`. We may change the value of `a`. Theoretically this has no effect on the sum of the four terms, but in practice may be important. A smaller value of the parameter `a` makes `A(s)` depend on a smaller number of zeros of zeta, but `P(s)` uses more values of von Mangoldt function. We may also add a verbose option to obtain data about the values of the four terms. >>> mp.dps = 10 >>> secondzeta(0.5 + 40j, error=True, verbose=True) main term = (-30190318549.138656312556 - 13964804384.624622876523j) computed using 19 zeros of zeta prime term = (132717176.89212754625045 + 188980555.17563978290601j) computed using 9 values of the von Mangoldt function exponential term = (542447428666.07179812536 + 362434922978.80192435203j) singular term = (512124392939.98154322355 + 348281138038.65531023921j) ((0.059471043 + 0.3463514534j), 1.455191523e-11) >>> secondzeta(0.5 + 40j, a=0.04, error=True, verbose=True) main term = (-151962888.19606243907725 - 217930683.90210294051982j) computed using 9 zeros of zeta prime term = (2476659342.3038722372461 + 28711581821.921627163136j) computed using 37 values of the von Mangoldt function exponential term = (178506047114.7838188264 + 819674143244.45677330576j) singular term = (175877424884.22441310708 + 790744630738.28669174871j) ((0.059471043 + 0.3463514534j), 1.455191523e-11) Notice the great cancellation between the four terms. Changing `a`, the four terms are very different numbers but the cancellation gives the good value of Z(s). **References** A. Voros, Zeta functions for the Riemann zeros, Ann. Institute Fourier, 53, (2003) 665--699. A. Voros, Zeta functions over Zeros of Zeta Functions, Lecture Notes of the Unione Matematica Italiana, Springer, 2009. 
""" s = ctx.convert(s) a = ctx.convert(a) tol = ctx.eps if ctx.isint(s) and ctx.re(s) <= 1: if abs(s-1) < tol*1000: return ctx.inf m = int(round(ctx.re(s))) if m & 1: return ctx.inf else: return ((-1)**(-m//2)*\ ctx.fraction(8-ctx.eulernum(-m,exact=True),2**(-m+3))) prec = ctx.prec try: t3 = secondzeta_exp_term(ctx, s, a) extraprec = max(ctx.mag(t3),0) ctx.prec += extraprec + 3 t1, r1, gt = secondzeta_main_term(ctx,s,a,error='True', verbose='True') t2, r2, pt = secondzeta_prime_term(ctx,s,a,error='True', verbose='True') t4, r4 = secondzeta_singular_term(ctx,s,a,error='True') t3 = secondzeta_exp_term(ctx, s, a) err = r1+r2+r4 t = t1-t2+t3-t4 if kwargs.get("verbose"): print('main term =', t1) print(' computed using', gt, 'zeros of zeta') print('prime term =', t2) print(' computed using', pt, 'values of the von Mangoldt function') print('exponential term =', t3) print('singular term =', t4) finally: ctx.prec = prec if kwargs.get("error"): w = max(ctx.mag(abs(t)),0) err = max(err*2**w, ctx.eps*1.*2**w) return +t, err return +t @defun_wrapped def lerchphi(ctx, z, s, a): r""" Gives the Lerch transcendent, defined for `|z| < 1` and `\Re{a} > 0` by .. math :: \Phi(z,s,a) = \sum_{k=0}^{\infty} \frac{z^k}{(a+k)^s} and generally by the recurrence `\Phi(z,s,a) = z \Phi(z,s,a+1) + a^{-s}` along with the integral representation valid for `\Re{a} > 0` .. math :: \Phi(z,s,a) = \frac{1}{2 a^s} + \int_0^{\infty} \frac{z^t}{(a+t)^s} dt - 2 \int_0^{\infty} \frac{\sin(t \log z - s \operatorname{arctan}(t/a)}{(a^2 + t^2)^{s/2} (e^{2 \pi t}-1)} dt. The Lerch transcendent generalizes the Hurwitz zeta function :func:`zeta` (`z = 1`) and the polylogarithm :func:`polylog` (`a = 1`). **Examples** Several evaluations in terms of simpler functions:: >>> from mpmath import * >>> mp.dps = 25; mp.pretty = True >>> lerchphi(-1,2,0.5); 4*catalan 3.663862376708876060218414 3.663862376708876060218414 >>> diff(lerchphi, (-1,-2,1), (0,1,0)); 7*zeta(3)/(4*pi**2) 0.2131391994087528954617607 0.2131391994087528954617607 >>> lerchphi(-4,1,1); log(5)/4 0.4023594781085250936501898 0.4023594781085250936501898 >>> lerchphi(-3+2j,1,0.5); 2*atanh(sqrt(-3+2j))/sqrt(-3+2j) (1.142423447120257137774002 + 0.2118232380980201350495795j) (1.142423447120257137774002 + 0.2118232380980201350495795j) Evaluation works for complex arguments and `|z| \ge 1`:: >>> lerchphi(1+2j, 3-j, 4+2j) (0.002025009957009908600539469 + 0.003327897536813558807438089j) >>> lerchphi(-2,2,-2.5) -12.28676272353094275265944 >>> lerchphi(10,10,10) (-4.462130727102185701817349e-11 + 1.575172198981096218823481e-12j) >>> lerchphi(10,10,-10.5) (112658784011940.5605789002 + 498113185.5756221777743631j) Some degenerate cases:: >>> lerchphi(0,1,2) 0.5 >>> lerchphi(0,1,-2) -0.5 **References** 1. [DLMF]_ section 25.14 """ if z == 0: return a ** (-s) """ # Faster, but these cases are useful for testing right now if z == 1: return ctx.zeta(s, a) if a == 1: return z * ctx.polylog(s, z) """ if ctx.re(a) < 1: if ctx.isnpint(a): raise ValueError("Lerch transcendent complex infinity") m = int(ctx.ceil(1-ctx.re(a))) v = ctx.zero zpow = ctx.one for n in xrange(m): v += zpow / (a+n)**s zpow *= z return zpow * ctx.lerchphi(z,s, a+m) + v g = ctx.ln(z) v = 1/(2*a**s) + ctx.gammainc(1-s, -a*g) * (-g)**(s-1) / z**a h = s / 2 r = 2*ctx.pi f = lambda t: ctx.sin(s*ctx.atan(t/a)-t*g) / \ ((a**2+t**2)**h * ctx.expm1(r*t)) v += 2*ctx.quad(f, [0, ctx.inf]) if not ctx.im(z) and not ctx.im(s) and not ctx.im(a) and ctx.re(z) < 1: v = ctx.chop(v) return v
GbalsaC/bitnamiP
venv/lib/python2.7/site-packages/sympy/mpmath/functions/zeta.py
Python
agpl-3.0
35,983
# Copyright 2017 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Common utilities for tests of the Cython layer of gRPC Python.""" import collections import threading from grpc._cython import cygrpc RPC_COUNT = 4000 EMPTY_FLAGS = 0 INVOCATION_METADATA = ( ('client-md-key', 'client-md-key'), ('client-md-key-bin', b'\x00\x01' * 3000), ) INITIAL_METADATA = ( ('server-initial-md-key', 'server-initial-md-value'), ('server-initial-md-key-bin', b'\x00\x02' * 3000), ) TRAILING_METADATA = ( ('server-trailing-md-key', 'server-trailing-md-value'), ('server-trailing-md-key-bin', b'\x00\x03' * 3000), ) class QueueDriver(object): def __init__(self, condition, completion_queue): self._condition = condition self._completion_queue = completion_queue self._due = collections.defaultdict(int) self._events = collections.defaultdict(list) def add_due(self, tags): if not self._due: def in_thread(): while True: event = self._completion_queue.poll() with self._condition: self._events[event.tag].append(event) self._due[event.tag] -= 1 self._condition.notify_all() if self._due[event.tag] <= 0: self._due.pop(event.tag) if not self._due: return thread = threading.Thread(target=in_thread) thread.start() for tag in tags: self._due[tag] += 1 def event_with_tag(self, tag): with self._condition: while True: if self._events[tag]: return self._events[tag].pop(0) else: self._condition.wait() def execute_many_times(behavior): return tuple(behavior() for _ in range(RPC_COUNT)) class OperationResult( collections.namedtuple('OperationResult', ( 'start_batch_result', 'completion_type', 'success', ))): pass SUCCESSFUL_OPERATION_RESULT = OperationResult( cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True) class RpcTest(object): def setUp(self): self.server_completion_queue = cygrpc.CompletionQueue() self.server = cygrpc.Server([(b'grpc.so_reuseport', 0)]) self.server.register_completion_queue(self.server_completion_queue) port = self.server.add_http2_port(b'[::]:0') self.server.start() self.channel = cygrpc.Channel('localhost:{}'.format(port).encode(), [], None) self._server_shutdown_tag = 'server_shutdown_tag' self.server_condition = threading.Condition() self.server_driver = QueueDriver(self.server_condition, self.server_completion_queue) with self.server_condition: self.server_driver.add_due({ self._server_shutdown_tag, }) self.client_condition = threading.Condition() self.client_completion_queue = cygrpc.CompletionQueue() self.client_driver = QueueDriver(self.client_condition, self.client_completion_queue) def tearDown(self): self.server.shutdown(self.server_completion_queue, self._server_shutdown_tag) self.server.cancel_all_calls()
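
# Illustrative usage sketch: tests built on RpcTest drive a completion queue
# through its QueueDriver by registering the tags they expect and blocking on
# the matching events; the tag value below is a placeholder for demonstration.
#
#     with self.client_condition:
#         self.client_driver.add_due({'my_call_tag'})
#     # ... start the batch that will complete with tag 'my_call_tag' ...
#     event = self.client_driver.event_with_tag('my_call_tag')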
endlessm/chromium-browser
third_party/grpc/src/src/python/grpcio_tests/tests/unit/_cython/_common.py
Python
bsd-3-clause
4,044
#!/usr/bin/python2.4 # Copyright 2009, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Test for environment_tools. These are SMALL and MEDIUM tests.""" import os import unittest import TestFramework class EnvToolsTests(unittest.TestCase): """Tests for environment_tools module.""" def setUp(self): """Per-test setup.""" self.env = self.root_env.Clone() def testFilterOut(self): """Test FilterOut().""" env = self.env env.Replace( TEST1=['ant', 'bear', 'cat'], TEST2=[1, 2, 3, 4], ) # Simple filter env.FilterOut(TEST1=['bear']) self.assertEqual(env['TEST1'], ['ant', 'cat']) # Filter multiple env.FilterOut(TEST1=['ant'], TEST2=[1, 3]) self.assertEqual(env['TEST1'], ['cat']) self.assertEqual(env['TEST2'], [2, 4]) # Filter doesn't care if the variable or value doesn't exist env.FilterOut(TEST1=['dog'], TEST3=[2]) self.assertEqual(env['TEST1'], ['cat']) self.assertEqual(env['TEST2'], [2, 4]) def testFilterOutRepeated(self): """Test FilterOut() filters all matches.""" env = self.env env['TEST3'] = ['A', 'B', 'B', 'C'] env.FilterOut(TEST3=['B']) self.assertEqual(env['TEST3'], ['A', 'C']) def testFilterOutNested(self): """Test FilterOut on nested lists.""" env = self.env # FilterOut does not currently flatten lists, nor remove values from # sub-lists. This is related to not evaluating environment variables (see # below). env['TEST4'] = ['A', ['B', 'C'], 'D'] env.FilterOut(TEST4=['B']) self.assertEqual(env['TEST4'], ['A', ['B', 'C'], 'D']) # If you specify the entire sub-list, it will be filtered env.FilterOut(TEST4=[['B', 'C']]) self.assertEqual(env['TEST4'], ['A', 'D']) def testFilterOutNoEval(self): """Test FilterOut does not evaluate variables in the list.""" env = self.env # FilterOut does not evaluate variables in the list. (Doing so would # defeat much of the purpose of variables.) Note that this means it does # not filter variables which evaluate partially or wholly to the filtered # string. On the plus side, this means you CAN filter out variables. 
env.Replace( TEST5=['$V1', '$V2', '$V3', '$V4'], V1='A', # (V2 intentionally undefined at this point) V3=['A', 'B'], V4='C', ) env.FilterOut(TEST5=['A', '$V4']) self.assertEqual(env['TEST5'], ['$V1', '$V2', '$V3']) def testOverlap(self): """Test Overlap().""" env = self.env env.Replace( OLVAR='baz', OLLIST=['2', '3', '4'], ) # Simple string compares self.assertEqual(env.Overlap('foo', 'foo'), ['foo']) self.assertEqual(env.Overlap('foo', 'food'), []) # String compare with variable substitution self.assertEqual(env.Overlap('foobaz', 'foo$OLVAR'), ['foobaz']) # Simple list overlap # Need to use set() for comparison, since the order of entries in the # output list is indeterminate self.assertEqual(set(env.Overlap(['1', '2', '3'], ['2', '3', '4'])), set(['2', '3'])) # Overlap removes duplicates self.assertEqual(env.Overlap(['1', '2', '2'], ['2', '3', '2']), ['2']) # List and string self.assertEqual(env.Overlap('3', ['1', '2', '3']), ['3']) self.assertEqual(env.Overlap('4', ['1', '2', '3']), []) self.assertEqual(env.Overlap(['1', '$OLVAR', '3'], '$OLVAR'), ['baz']) # Variable substitition will replace and flatten lists self.assertEqual(set(env.Overlap(['1', '2', '3'], '$OLLIST')), set(['2', '3'])) # Substitution flattens lists self.assertEqual(set(env.Overlap([['1', '2'], '3'], ['2', ['3', '4']])), set(['2', '3'])) def testSubstList2(self): """Test SubstList2().""" env = self.env # Empty args should return empty list self.assertEqual(env.SubstList2(), []) # Undefined variable also returns empty list self.assertEqual(env.SubstList2('$NO_SUCH_VAR'), []) # Simple substitution (recursively evaluates variables) env['STR1'] = 'FOO$STR2' env['STR2'] = 'BAR' self.assertEqual(env.SubstList2('$STR1'), ['FOOBAR']) # Simple list substitution env['LIST1'] = ['A', 'B'] self.assertEqual(env.SubstList2('$LIST1'), ['A', 'B']) # Nested lists env['LIST2'] = ['C', '$LIST1'] self.assertEqual(env.SubstList2('$LIST2'), ['C', 'A', 'B']) # Multiple variables in a single entry stay a single entry self.assertEqual(env.SubstList2('$STR1 $STR2'), ['FOOBAR BAR']) # Multiple args to command self.assertEqual(env.SubstList2('$LIST2', '$STR2'), ['C', 'A', 'B', 'BAR']) # Items in list are actually strings, not some subclass self.assert_(type(env.SubstList2('$STR1')[0]) is str) def testRelativePath(self): """Test RelativePath().""" env = self.env # Trivial cases - directory or file relative to itself self.assertEqual(env.RelativePath('a', 'a'), '.') self.assertEqual(env.RelativePath('a/b/c', 'a/b/c'), '.') self.assertEqual(env.RelativePath('a', 'a', source_is_file=True), 'a') self.assertEqual(env.RelativePath('a/b/c', 'a/b/c', source_is_file=True), 'c') # Can pass in directory or file nodes self.assertEqual(env.RelativePath(env.Dir('a'), env.File('b/c'), sep='/'), '../b/c') # Separator argument is respected self.assertEqual(env.RelativePath('.', 'a/b/c', sep='BOOGA'), 'aBOOGAbBOOGAc') # Default separator is os.sep self.assertEqual(env.RelativePath('.', 'a/b'), 'a' + os.sep + 'b') # No common dirs self.assertEqual(env.RelativePath('a/b/c', 'd/e/f', sep='/'), '../../../d/e/f') self.assertEqual( env.RelativePath('a/b/c', 'd/e/f', sep='/', source_is_file=True), '../../d/e/f') # Common dirs self.assertEqual(env.RelativePath('a/b/c/d', 'a/b/e/f', sep='/'), '../../e/f') # Source or destination path is different length self.assertEqual(env.RelativePath('a/b/c/d', 'a/b', sep='/'), '../..') self.assertEqual(env.RelativePath('a/b', 'a/b/c/d', sep='/'), 'c/d') # Current directory on either side self.assertEqual(env.RelativePath('a/b/c', '.', 
sep='/'), '../../..') self.assertEqual(env.RelativePath('.', 'a/b/c', sep='/'), 'a/b/c') # Variables are evaluated env.Replace( DIR1='foo', DIR2='bar', ) self.assertEqual(env.RelativePath('foo/$DIR2/a', '$DIR1/bar/b', sep='/'), '../b') def testApplyBuildSConscript(self): """Test ApplySConscript() and BuildSConscript() (MEDIUM test).""" env = self.env env['SUB1'] = 'nougat' # ApplySConscript() affects the calling environment env.ApplySConscript('SConscript1') self.assertEqual(env.get('SUB2'), 'orange') # BuildSConscript() does not affect the calling environment env.BuildSConscript('SConscript2') self.assertEqual(env.get('SUB2'), 'orange') # BuildSConscript finds build.scons in preference to SConscript env.BuildSConscript('abs1') # But does look for SConscript if there isn't build.scons env.BuildSConscript('abs2') def TestSConstruct(scons_globals): """Test SConstruct file. Args: scons_globals: Global variables dict from the SConscript file. """ # Get globals from SCons Environment = scons_globals['Environment'] env = Environment(tools=['environment_tools']) # Run unit tests TestFramework.RunUnitTests(EnvToolsTests, root_env=env) sconscript1_contents = """ Import('env') if env.get('SUB1') != 'nougat': raise ValueError('ApplySConscript() failure in sconscript1') env['SUB2'] = 'orange' """ sconscript2_contents = """ Import('env') if env.get('SUB1') != 'nougat': raise ValueError('BuildSConscript() failure in sconscript2') env['SUB2'] = 'pizza' """ sconscript3_contents = """ Import('env') filename = '%s' env.Execute(Touch(filename)) """ def main(): test = TestFramework.TestFramework() test.subdir('environment_tools') base = 'environment_tools/' test.WriteSConscript(base + 'SConstruct', TestSConstruct) test.write(base + 'SConscript1', sconscript1_contents) test.write(base + 'SConscript2', sconscript2_contents) test.subdir(base + 'abs1') test.write(base + 'abs1/build.scons', sconscript3_contents % 'yes1') test.write(base + 'abs1/SConscript', sconscript3_contents % 'no') test.subdir(base + 'abs2') test.write(base + 'abs2/SConscript', sconscript3_contents % 'yes2') # Ignore stderr since unittest prints its output there test.run(chdir=base, stderr=None) test.must_exist(base + 'abs1/yes1') test.must_not_exist(base + 'abs1/no') test.must_exist(base + 'abs2/yes2') test.pass_test() if __name__ == '__main__': main()
nguyentran/openviber
tools/swtoolkit/test/environment_tools_test.py
Python
mit
10,313
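For the plain directory-to-directory cases exercised above, the standard library reaches the same answers: posixpath.relpath(target, source) matches env.RelativePath(source, target, sep='/') when no SCons nodes, $VARIABLE substitution, or source_is_file handling is involved. A minimal cross-check under that assumption (string paths only):

import posixpath

assert posixpath.relpath('a', 'a') == '.'                        # trivial case
assert posixpath.relpath('d/e/f', 'a/b/c') == '../../../d/e/f'   # no common dirs
assert posixpath.relpath('a/b/e/f', 'a/b/c/d') == '../../e/f'    # common prefix
assert posixpath.relpath('a/b', 'a/b/c/d') == '../..'            # shorter target
assert posixpath.relpath('a/b/c', '.') == 'a/b/c'                # from current dir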
# pylint: disable=missing-docstring # pylint: disable=redefined-outer-name # pylint: disable=unused-argument from lettuce import step, world from common import * ############### ACTIONS #################### @step('There are no courses$') def no_courses(step): world.clear_courses() create_studio_user() @step('I click the New Course button$') def i_click_new_course(step): world.css_click('.new-course-button') @step('I fill in the new course information$') def i_fill_in_a_new_course_information(step): fill_in_course_info() @step('I create a course with "([^"]*)", "([^"]*)", "([^"]*)", and "([^"]*)"') def i_create_course(step, name, org, number, run): fill_in_course_info(name=name, org=org, num=number, run=run) @step('I create a new course$') def i_create_a_course(step): create_a_course() @step('I click the course link in Studio Home$') def i_click_the_course_link_in_studio_home(step): # pylint: disable=invalid-name course_css = 'a.course-link' world.css_click(course_css) @step('I see an error about the length of the org/course/run tuple') def i_see_error_about_length(step): assert world.css_has_text( '#course_creation_error', 'The combined length of the organization, course number, ' 'and course run fields cannot be more than 65 characters.' ) ############ ASSERTIONS ################### @step('the Courseware page has loaded in Studio$') def courseware_page_has_loaded_in_studio(step): course_title_css = 'span.course-title' assert world.is_css_present(course_title_css) @step('I see the course listed in Studio Home$') def i_see_the_course_in_studio_home(step): course_css = 'h3.class-title' assert world.css_has_text(course_css, world.scenario_dict['COURSE'].display_name) @step('I am on the "([^"]*)" tab$') def i_am_on_tab(step, tab_name): header_css = 'div.inner-wrapper h1' assert world.css_has_text(header_css, tab_name) @step('I see a link for adding a new section$') def i_see_new_section_link(step): link_css = '.outline .button-new' assert world.css_has_text(link_css, 'New Section')
Stanford-Online/edx-platform
cms/djangoapps/contentstore/features/courses.py
Python
agpl-3.0
2,137
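The steps above rely on lettuce's regex dispatch: capture groups in the @step pattern become positional arguments after the step object. A rough standalone sketch of that mapping using plain re (not lettuce's actual implementation; the sentence and handler below are illustrative):

import re

def run_step(pattern, handler, sentence):
    # Each '([^"]*)' group becomes one positional argument, as in
    # i_create_course(step, name, org, number, run) above.
    match = re.match(pattern, sentence)
    if match:
        handler(None, *match.groups())

def create_course(step, name, org, number, run):
    print(name, org, number, run)

run_step(r'I create a course with "([^"]*)", "([^"]*)", "([^"]*)", and "([^"]*)"',
         create_course,
         'I create a course with "Demo Course", "edX", "101", and "2014_T1"')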
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests and Benchmarks for Densenet model.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import gc import time import tensorflow as tf import tensorflow.contrib.eager as tfe from tensorflow.contrib.eager.python.examples.densenet import densenet from tensorflow.python.client import device_lib class DensenetTest(tf.test.TestCase): def test_bottleneck_true(self): depth = 7 growth_rate = 2 num_blocks = 3 output_classes = 10 num_layers_in_each_block = -1 batch_size = 1 data_format = ('channels_first') if tf.test.is_gpu_available() else ( 'channels_last') model = densenet.DenseNet(depth, growth_rate, num_blocks, output_classes, num_layers_in_each_block, data_format, bottleneck=True, compression=0.5, weight_decay=1e-4, dropout_rate=0, pool_initial=False, include_top=True) if data_format == 'channels_last': rand_input = tf.random_uniform((batch_size, 32, 32, 3)) else: rand_input = tf.random_uniform((batch_size, 3, 32, 32)) output_shape = model(rand_input).shape self.assertEqual(output_shape, (batch_size, output_classes)) def test_bottleneck_false(self): depth = 7 growth_rate = 2 num_blocks = 3 output_classes = 10 num_layers_in_each_block = -1 batch_size = 1 data_format = ('channels_first') if tf.test.is_gpu_available() else ( 'channels_last') model = densenet.DenseNet(depth, growth_rate, num_blocks, output_classes, num_layers_in_each_block, data_format, bottleneck=False, compression=0.5, weight_decay=1e-4, dropout_rate=0, pool_initial=False, include_top=True) if data_format == 'channels_last': rand_input = tf.random_uniform((batch_size, 32, 32, 3)) else: rand_input = tf.random_uniform((batch_size, 3, 32, 32)) output_shape = model(rand_input).shape self.assertEqual(output_shape, (batch_size, output_classes)) def test_pool_initial_true(self): depth = 7 growth_rate = 2 num_blocks = 4 output_classes = 10 num_layers_in_each_block = [1, 2, 2, 1] batch_size = 1 data_format = ('channels_first') if tf.test.is_gpu_available() else ( 'channels_last') model = densenet.DenseNet(depth, growth_rate, num_blocks, output_classes, num_layers_in_each_block, data_format, bottleneck=True, compression=0.5, weight_decay=1e-4, dropout_rate=0, pool_initial=True, include_top=True) if data_format == 'channels_last': rand_input = tf.random_uniform((batch_size, 32, 32, 3)) else: rand_input = tf.random_uniform((batch_size, 3, 32, 32)) output_shape = model(rand_input).shape self.assertEqual(output_shape, (batch_size, output_classes)) def test_regularization(self): if tf.test.is_gpu_available(): rand_input = tf.random_uniform((10, 3, 32, 32)) data_format = 'channels_first' else: rand_input = tf.random_uniform((10, 32, 32, 3)) data_format = 'channels_last' weight_decay = 1e-4 conv = tf.keras.layers.Conv2D( 3, (3, 3), padding='same', use_bias=False, data_format=data_format, 
kernel_regularizer=tf.keras.regularizers.l2(weight_decay)) optimizer = tf.train.GradientDescentOptimizer(0.1) conv(rand_input) # Initialize the variables in the layer def compute_true_l2(vs, wd): return tf.reduce_sum(tf.square(vs)) * wd true_l2 = compute_true_l2(conv.variables, weight_decay) keras_l2 = tf.add_n(conv.losses) self.assertAllClose(true_l2, keras_l2) with tf.GradientTape() as tape_true, tf.GradientTape() as tape_keras: loss = tf.reduce_sum(conv(rand_input)) loss_with_true_l2 = loss + compute_true_l2(conv.variables, weight_decay) loss_with_keras_l2 = loss + tf.add_n(conv.losses) true_grads = tape_true.gradient(loss_with_true_l2, conv.variables) keras_grads = tape_keras.gradient(loss_with_keras_l2, conv.variables) self.assertAllClose(true_grads, keras_grads) optimizer.apply_gradients(zip(keras_grads, conv.variables)) keras_l2_after_update = tf.add_n(conv.losses) self.assertNotAllClose(keras_l2, keras_l2_after_update) def compute_gradients(model, images, labels): with tf.GradientTape() as tape: logits = model(images, training=True) cross_ent = tf.losses.softmax_cross_entropy( logits=logits, onehot_labels=labels) regularization = tf.add_n(model.losses) loss = cross_ent + regularization tf.contrib.summary.scalar(name='loss', tensor=loss) return tape.gradient(loss, model.variables) def apply_gradients(model, optimizer, gradients): optimizer.apply_gradients(zip(gradients, model.variables)) def device_and_data_format(): return ('/gpu:0', 'channels_first') if tf.test.is_gpu_available() else ('/cpu:0', 'channels_last') def random_batch(batch_size, data_format): shape = (3, 224, 224) if data_format == 'channels_first' else (224, 224, 3) shape = (batch_size,) + shape num_classes = 1000 images = tf.random_uniform(shape) labels = tf.random_uniform( [batch_size], minval=0, maxval=num_classes, dtype=tf.int32) one_hot = tf.one_hot(labels, num_classes) return images, one_hot class MockIterator(object): def __init__(self, tensors): self._tensors = [tf.identity(x) for x in tensors] def next(self): return self._tensors class DensenetBenchmark(tf.test.Benchmark): def __init__(self): self.depth = 121 self.growth_rate = 32 self.num_blocks = 4 self.output_classes = 1000 self.num_layers_in_each_block = [6, 12, 24, 16] def _train_batch_sizes(self): """Choose batch sizes based on GPU capability.""" for device in device_lib.list_local_devices(): if tf.DeviceSpec.from_string(device.name).device_type == 'GPU': if 'K20' in device.physical_device_desc: return (16,) if 'P100' in device.physical_device_desc: return (16, 32, 64) if tf.DeviceSpec.from_string(device.name).device_type == 'TPU': return (32,) return (16, 32) def _report(self, label, start, num_iters, device, batch_size, data_format): avg_time = (time.time() - start) / num_iters dev = tf.DeviceSpec.from_string(device).device_type.lower() name = '%s_%s_batch_%d_%s' % (label, dev, batch_size, data_format) extras = {'examples_per_sec': batch_size / avg_time} self.report_benchmark( iters=num_iters, wall_time=avg_time, name=name, extras=extras) def _force_device_sync(self): # If this function is called in the context of a non-CPU device # (e.g., inside a 'with tf.device("/gpu:0")' block) # then this will force a copy from CPU->NON_CPU_DEVICE->CPU, # which forces a sync. This is a roundabout way, yes. 
tf.constant(1.).cpu() def _benchmark_eager_apply(self, label, device_and_format, defun=False, execution_mode=None): with tfe.execution_mode(execution_mode): device, data_format = device_and_format model = densenet.DenseNet(self.depth, self.growth_rate, self.num_blocks, self.output_classes, self.num_layers_in_each_block, data_format, bottleneck=True, compression=0.5, weight_decay=1e-4, dropout_rate=0, pool_initial=True, include_top=True) if defun: # TODO(apassos) enable tfe.function here model.call = tfe.defun(model.call) batch_size = 64 num_burn = 5 num_iters = 30 with tf.device(device): images, _ = random_batch(batch_size, data_format) for _ in xrange(num_burn): model(images, training=False).cpu() if execution_mode: tfe.async_wait() gc.collect() start = time.time() for _ in xrange(num_iters): model(images, training=False).cpu() if execution_mode: tfe.async_wait() self._report(label, start, num_iters, device, batch_size, data_format) def benchmark_eager_apply_sync(self): self._benchmark_eager_apply('eager_apply', device_and_data_format(), defun=False) def benchmark_eager_apply_async(self): self._benchmark_eager_apply( 'eager_apply_async', device_and_data_format(), defun=False, execution_mode=tfe.ASYNC) def benchmark_eager_apply_with_defun(self): self._benchmark_eager_apply('eager_apply_with_defun', device_and_data_format(), defun=True) def _benchmark_eager_train(self, label, make_iterator, device_and_format, defun=False, execution_mode=None): with tfe.execution_mode(execution_mode): device, data_format = device_and_format for batch_size in self._train_batch_sizes(): (images, labels) = random_batch(batch_size, data_format) model = densenet.DenseNet(self.depth, self.growth_rate, self.num_blocks, self.output_classes, self.num_layers_in_each_block, data_format, bottleneck=True, compression=0.5, weight_decay=1e-4, dropout_rate=0, pool_initial=True, include_top=True) optimizer = tf.train.GradientDescentOptimizer(0.1) apply_grads = apply_gradients if defun: model.call = tfe.defun(model.call) apply_grads = tfe.defun(apply_gradients) num_burn = 3 num_iters = 10 with tf.device(device): iterator = make_iterator((images, labels)) for _ in xrange(num_burn): (images, labels) = iterator.next() apply_grads(model, optimizer, compute_gradients(model, images, labels)) if execution_mode: tfe.async_wait() self._force_device_sync() gc.collect() start = time.time() for _ in xrange(num_iters): (images, labels) = iterator.next() apply_grads(model, optimizer, compute_gradients(model, images, labels)) if execution_mode: tfe.async_wait() self._force_device_sync() self._report(label, start, num_iters, device, batch_size, data_format) def benchmark_eager_train_sync(self): self._benchmark_eager_train('eager_train', MockIterator, device_and_data_format(), defun=False) def benchmark_eager_train_async(self): self._benchmark_eager_train( 'eager_train_async', MockIterator, device_and_data_format(), defun=False, execution_mode=tfe.ASYNC) def benchmark_eager_train_with_defun(self): self._benchmark_eager_train( 'eager_train_with_defun', MockIterator, device_and_data_format(), defun=True) def benchmark_eager_train_datasets(self): def make_iterator(tensors): with tf.device('/device:CPU:0'): ds = tf.data.Dataset.from_tensors(tensors).repeat() return tfe.Iterator(ds) self._benchmark_eager_train( 'eager_train_dataset', make_iterator, device_and_data_format(), defun=False) def benchmark_eager_train_datasets_with_defun(self): def make_iterator(tensors): with tf.device('/device:CPU:0'): ds = 
tf.data.Dataset.from_tensors(tensors).repeat() return tfe.Iterator(ds) self._benchmark_eager_train( 'eager_train_dataset_with_defun', make_iterator, device_and_data_format(), defun=True) if __name__ == '__main__': tf.enable_eager_execution() tf.test.main()
ghchinoy/tensorflow
tensorflow/contrib/eager/python/examples/densenet/densenet_test.py
Python
apache-2.0
12,967
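test_regularization() above boils down to one identity: a Keras l2 kernel regularizer contributes weight_decay * sum(w**2) per variable to the layer's losses. A minimal NumPy restatement of that quantity (names here are illustrative, no TensorFlow required):

import numpy as np

weight_decay = 1e-4
kernel = np.random.randn(3, 3, 3, 8)        # stand-in for a conv kernel

def l2_penalty(weights, wd):
    # Same formula as compute_true_l2() in the test above.
    return wd * np.sum(np.square(weights))

naive = sum(weight_decay * w * w for w in kernel.ravel())
assert np.isclose(l2_penalty(kernel, weight_decay), naive)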
#!/usr/bin/python # # (c) 2018 Extreme Networks Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: exos_facts version_added: "2.7" author: - "Lance Richardson (@hlrichardson)" - "Ujwal Koamrla (@ujwalkomarla)" short_description: Collect facts from devices running Extreme EXOS description: - Collects a base set of device facts from a remote device that is running EXOS. This module prepends all of the base network fact keys with C(ansible_net_<fact>). The facts module will always collect a base set of facts from the device and can enable or disable collection of additional facts. notes: - Tested against EXOS 22.5.1.7 options: gather_subset: description: - When supplied, this argument will restrict the facts collected to a given subset. Possible values for this argument include all, hardware, config, and interfaces. Can specify a list of values to include a larger subset. Values can also be used with an initial C(M(!)) to specify that a specific subset should not be collected. required: false type: list default: ['!config'] gather_network_resources: description: - When supplied, this argument will restrict the facts collected to a given subset. Possible values for this argument include all and the resources like interfaces, vlans etc. Can specify a list of values to include a larger subset. Values can also be used with an initial C(M(!)) to specify that a specific subset should not be collected. Valid subsets are 'all', 'lldp_global'. 
type: list version_added: "2.9" """ EXAMPLES = """ - name: Gather all legacy facts exos_facts: gather_subset: all - name: Gather only the config and default facts exos_facts: gather_subset: config - name: do not gather hardware facts exos_facts: gather_subset: "!hardware" - name: Gather legacy and resource facts exos_facts: gather_subset: all gather_network_resources: all - name: Gather only the lldp global resource facts and no legacy facts exos_facts: gather_subset: - '!all' - '!min' gather_network_resource: - lldp_global - name: Gather lldp global resource and minimal legacy facts exos_facts: gather_subset: min gather_network_resource: lldp_global """ RETURN = """ ansible_net_gather_subset: description: The list of fact subsets collected from the device returned: always type: list ansible_net_gather_network_resources: description: The list of fact for network resource subsets collected from the device returned: when the resource is configured type: list # default ansible_net_model: description: The model name returned from the device returned: always type: str ansible_net_serialnum: description: The serial number of the remote device returned: always type: str ansible_net_version: description: The operating system version running on the remote device returned: always type: str ansible_net_hostname: description: The configured hostname of the device returned: always type: str # hardware ansible_net_memfree_mb: description: The available free memory on the remote device in Mb returned: when hardware is configured type: int ansible_net_memtotal_mb: description: The total memory on the remote device in Mb returned: when hardware is configured type: int # config ansible_net_config: description: The current active config from the device returned: when config is configured type: str # interfaces ansible_net_all_ipv4_addresses: description: All IPv4 addresses configured on the device returned: when interfaces is configured type: list ansible_net_all_ipv6_addresses: description: All Primary IPv6 addresses configured on the device returned: when interfaces is configured type: list ansible_net_interfaces: description: A hash of all interfaces running on the system returned: when interfaces is configured type: dict ansible_net_neighbors: description: The list of LLDP neighbors from the remote device returned: when interfaces is configured type: dict """ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.exos.argspec.facts.facts import FactsArgs from ansible.module_utils.network.exos.facts.facts import Facts def main(): """Main entry point for AnsibleModule """ argument_spec = FactsArgs.argument_spec module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = ['default value for `gather_subset` ' 'will be changed to `min` from `!config` v2.11 onwards'] result = Facts(module).get_facts() ansible_facts, additional_warnings = result warnings.extend(additional_warnings) module.exit_json(ansible_facts=ansible_facts, warnings=warnings) if __name__ == '__main__': main()
thaim/ansible
lib/ansible/modules/network/exos/exos_facts.py
Python
mit
5,863
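The gather_subset documentation above uses '!'-prefixed entries to exclude fact groups (e.g. the default ['!config']). The sketch below only illustrates that resolution rule and is not the code Ansible actually runs; the subset names come from the option description:

LEGACY_SUBSETS = {'hardware', 'config', 'interfaces'}

def resolve_gather_subset(requested, valid=LEGACY_SUBSETS):
    wanted, excluded = set(), set()
    for entry in requested:
        if entry.startswith('!'):
            excluded |= valid if entry[1:] == 'all' else {entry[1:]}
        else:
            wanted |= valid if entry == 'all' else {entry}
    if not wanted:                 # only exclusions given: start from everything
        wanted = set(valid)
    return wanted - excluded

print(resolve_gather_subset(['!config']))   # hardware and interfaces remain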
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Sparsemax op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn __all__ = ["sparsemax"] def sparsemax(logits, name=None): """Computes sparsemax activations [1]. For each batch `i` and class `j` we have $$sparsemax[i, j] = max(logits[i, j] - tau(logits[i, :]), 0)$$ [1]: https://arxiv.org/abs/1602.02068 Args: logits: A `Tensor`. Must be one of the following types: `half`, `float32`, `float64`. name: A name for the operation (optional). Returns: A `Tensor`. Has the same type as `logits`. """ with ops.name_scope(name, "sparsemax", [logits]) as name: logits = ops.convert_to_tensor(logits, name="logits") obs = array_ops.shape(logits)[0] dims = array_ops.shape(logits)[1] # In the paper, they call the logits z. # The mean(logits) can be substracted from logits to make the algorithm # more numerically stable. the instability in this algorithm comes mostly # from the z_cumsum. Substacting the mean will cause z_cumsum to be close # to zero. However, in practise the numerical instability issues are very # minor and substacting the mean causes extra issues with inf and nan # input. z = logits # sort z z_sorted, _ = nn.top_k(z, k=dims) # calculate k(z) z_cumsum = math_ops.cumsum(z_sorted, axis=1) k = math_ops.range( 1, math_ops.cast(dims, logits.dtype) + 1, dtype=logits.dtype) z_check = 1 + k * z_sorted > z_cumsum # because the z_check vector is always [1,1,...1,0,0,...0] finding the # (index + 1) of the last `1` is the same as just summing the number of 1. k_z = math_ops.reduce_sum(math_ops.cast(z_check, dtypes.int32), axis=1) # calculate tau(z) # If there are inf values or all values are -inf, the k_z will be zero, # this is mathematically invalid and will also cause the gather_nd to fail. # Prevent this issue for now by setting k_z = 1 if k_z = 0, this is then # fixed later (see p_safe) by returning p = nan. This results in the same # behavior as softmax. k_z_safe = math_ops.maximum(k_z, 1) indices = array_ops.stack([math_ops.range(0, obs), k_z_safe - 1], axis=1) tau_sum = array_ops.gather_nd(z_cumsum, indices) tau_z = (tau_sum - 1) / math_ops.cast(k_z, logits.dtype) # calculate p p = math_ops.maximum( math_ops.cast(0, logits.dtype), z - tau_z[:, array_ops.newaxis]) # If k_z = 0 or if z = nan, then the input is invalid p_safe = array_ops.where( math_ops.logical_or( math_ops.equal(k_z, 0), math_ops.is_nan(z_cumsum[:, -1])), array_ops.fill([obs, dims], math_ops.cast(float("nan"), logits.dtype)), p) return p_safe
ghchinoy/tensorflow
tensorflow/contrib/sparsemax/python/ops/sparsemax.py
Python
apache-2.0
3,656
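The comments above describe the whole algorithm: sort z, find the support size k(z), derive the threshold tau(z), then clip. A compact NumPy restatement for a single 1-D example (function name illustrative; the inf/nan safeguards of the op are not reproduced):

import numpy as np

def sparsemax_1d(z):
    z = np.asarray(z, dtype=np.float64)
    z_sorted = np.sort(z)[::-1]                  # descending, like top_k
    z_cumsum = np.cumsum(z_sorted)
    k = np.arange(1, z.size + 1)
    k_z = np.sum(1 + k * z_sorted > z_cumsum)    # support size k(z)
    tau_z = (z_cumsum[k_z - 1] - 1) / k_z        # threshold tau(z)
    return np.maximum(z - tau_z, 0.0)

p = sparsemax_1d([0.5, 1.5, 0.1])
assert np.isclose(p.sum(), 1.0)                  # lands on the probability simplex
assert (p == 0).any()                            # and is typically sparse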
# -*- coding: utf-8 -*- # ########################## Copyrights and license ############################ # # # Copyright 2012 Vincent Jacques <[email protected]> # # Copyright 2012 Zearin <[email protected]> # # Copyright 2013 AKFish <[email protected]> # # Copyright 2013 Bill Mill <[email protected]> # # Copyright 2013 Vincent Jacques <[email protected]> # # Copyright 2013 davidbrai <[email protected]> # # # # This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ # # # # PyGithub is free software: you can redistribute it and/or modify it under # # the terms of the GNU Lesser General Public License as published by the Free # # Software Foundation, either version 3 of the License, or (at your option) # # any later version. # # # # PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY # # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # # details. # # # # You should have received a copy of the GNU Lesser General Public License # # along with PyGithub. If not, see <http://www.gnu.org/licenses/>. # # # # ############################################################################## import github.GithubObject class PaginatedListBase: def __init__(self): self.__elements = list() def __getitem__(self, index): assert isinstance(index, (int, slice)) if isinstance(index, (int, long)): self.__fetchToIndex(index) return self.__elements[index] else: return self._Slice(self, index) def __iter__(self): for element in self.__elements: yield element while self._couldGrow(): newElements = self._grow() for element in newElements: yield element def _isBiggerThan(self, index): return len(self.__elements) > index or self._couldGrow() def __fetchToIndex(self, index): while len(self.__elements) <= index and self._couldGrow(): self._grow() def _grow(self): newElements = self._fetchNextPage() self.__elements += newElements return newElements class _Slice: def __init__(self, theList, theSlice): self.__list = theList self.__start = theSlice.start or 0 self.__stop = theSlice.stop self.__step = theSlice.step or 1 def __iter__(self): index = self.__start while not self.__finished(index): if self.__list._isBiggerThan(index): yield self.__list[index] index += self.__step else: return def __finished(self, index): return self.__stop is not None and index >= self.__stop class PaginatedList(PaginatedListBase): """ This class abstracts the `pagination of the API <http://developer.github.com/v3/#pagination>`_. 
You can simply enumerate through instances of this class:: for repo in user.get_repos(): print repo.name You can also index them or take slices:: second_repo = user.get_repos()[1] first_repos = user.get_repos()[:10] If you want to iterate in reversed order, just do:: for repo in user.get_repos().reversed: print repo.name And if you really need it, you can explicitely access a specific page:: some_repos = user.get_repos().get_page(0) some_other_repos = user.get_repos().get_page(3) """ def __init__(self, contentClass, requester, firstUrl, firstParams, headers=None): PaginatedListBase.__init__(self) self.__requester = requester self.__contentClass = contentClass self.__firstUrl = firstUrl self.__firstParams = firstParams or () self.__nextUrl = firstUrl self.__nextParams = firstParams or {} self.__headers = headers if self.__requester.per_page != 30: self.__nextParams["per_page"] = self.__requester.per_page self._reversed = False self.__totalCount = None @property def totalCount(self): if not self.__totalCount: self._grow() return self.__totalCount def _getLastPageUrl(self): headers, data = self.__requester.requestJsonAndCheck( "GET", self.__firstUrl, parameters=self.__nextParams, headers=self.__headers ) links = self.__parseLinkHeader(headers) lastUrl = links.get("last") return lastUrl @property def reversed(self): r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams) r.__reverse() return r def __reverse(self): self._reversed = True lastUrl = self._getLastPageUrl() if lastUrl: self.__nextUrl = lastUrl def _couldGrow(self): return self.__nextUrl is not None def _fetchNextPage(self): headers, data = self.__requester.requestJsonAndCheck( "GET", self.__nextUrl, parameters=self.__nextParams, headers=self.__headers ) data = data if data else [] self.__nextUrl = None if len(data) > 0: links = self.__parseLinkHeader(headers) if self._reversed: if "prev" in links: self.__nextUrl = links["prev"] elif "next" in links: self.__nextUrl = links["next"] self.__nextParams = None if 'items' in data: self.__totalCount = data['total_count'] data = data["items"] content = [ self.__contentClass(self.__requester, headers, element, completed=False) for element in data if element is not None ] if self._reversed: return content[::-1] return content def __parseLinkHeader(self, headers): links = {} if "link" in headers: linkHeaders = headers["link"].split(", ") for linkHeader in linkHeaders: (url, rel) = linkHeader.split("; ") url = url[1:-1] rel = rel[5:-1] links[rel] = url return links def get_page(self, page): params = dict(self.__firstParams) if page != 0: params["page"] = page + 1 if self.__requester.per_page != 30: params["per_page"] = self.__requester.per_page headers, data = self.__requester.requestJsonAndCheck( "GET", self.__firstUrl, parameters=params, headers=self.__headers ) if 'items' in data: self.__totalCount = data['total_count'] data = data["items"] return [ self.__contentClass(self.__requester, headers, element, completed=False) for element in data ]
cytec/SickRage
lib/github/PaginatedList.py
Python
gpl-3.0
7,862
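Pagination above is driven by GitHub's Link response header; __parseLinkHeader() splits it into rel -> url pairs that _fetchNextPage() and __reverse() then follow. The same parsing in isolation (the header value below is just an example of the format):

def parse_link_header(link_value):
    links = {}
    for link in link_value.split(", "):
        url, rel = link.split("; ")
        links[rel[5:-1]] = url[1:-1]   # strip rel="..." and <...>
    return links

header = ('<https://api.github.com/user/repos?page=2>; rel="next", '
          '<https://api.github.com/user/repos?page=5>; rel="last"')
print(parse_link_header(header))
# {'next': 'https://api.github.com/user/repos?page=2',
#  'last': 'https://api.github.com/user/repos?page=5'}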
import os import shutil import tempfile import vmprof import prof_six as six from _prof_imports import TreeStats, CallTreeStat class VmProfProfile(object): """ Wrapper class that represents VmProf Python profiling backend with API matching the cProfile. """ def __init__(self): self.stats = None self.basepath = None self.file = None self.is_enabled = False def runcall(self, func, *args, **kw): self.enable() try: return func(*args, **kw) finally: self.disable() def enable(self): if not self.is_enabled: if not os.path.exists(self.basepath): os.makedirs(self.basepath) self.file = tempfile.NamedTemporaryFile(delete=False, dir=self.basepath) try: vmprof.enable(self.file.fileno(), lines=True) except: vmprof.enable(self.file.fileno()) self.is_enabled = True def disable(self): if self.is_enabled: vmprof.disable() self.file.close() self.is_enabled = False def create_stats(self): return None def getstats(self): self.create_stats() return self.stats def dump_stats(self, file): shutil.copyfile(self.file.name, file) def _walk_tree(self, parent, node, callback): tree = callback(parent, node) for c in six.itervalues(node.children): self._walk_tree(node, c, callback) return tree def tree_stats_to_response(self, filename, response): tree_stats_to_response(filename, response) def snapshot_extension(self): return '.prof' def _walk_tree(parent, node, callback): if node is None: return None tree = callback(parent, node) for c in six.itervalues(node.children): _walk_tree(tree, c, callback) return tree def tree_stats_to_response(filename, response): stats = vmprof.read_profile(filename) response.tree_stats = TreeStats() response.tree_stats.sampling_interval = vmprof.DEFAULT_PERIOD try: tree = stats.get_tree() except vmprof.stats.EmptyProfileFile: tree = None def convert(parent, node): tstats = CallTreeStat() tstats.name = node.name tstats.count = node.count tstats.children = [] tstats.line_count = getattr(node, 'lines', {}) if parent is not None: if parent.children is None: parent.children = [] parent.children.append(tstats) return tstats response.tree_stats.call_tree = _walk_tree(None, tree, convert)
siosio/intellij-community
python/helpers/profiler/vmprof_profiler.py
Python
apache-2.0
2,691
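The wrapper above mirrors cProfile's interface, so driving it is mostly runcall() plus dump_stats(); basepath has to be set first because enable() creates the temporary profile file there. A small usage sketch (the paths and workload are illustrative, and vmprof itself must be installed):

import tempfile

profiler = VmProfProfile()                 # the class defined above
profiler.basepath = tempfile.mkdtemp()

def work():
    return sum(i * i for i in range(100000))

result = profiler.runcall(work)            # enable() -> work() -> disable()
profiler.dump_stats('/tmp/session.prof')   # copies the raw vmprof output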
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2013 Zuza Software Foundation # # This file is part of translate. # # translate is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # translate is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. from pytest import mark from translate.misc import wStringIO from translate.storage import dtd, test_monolingual def test_roundtrip_quoting(): specials = [ 'Fish & chips', 'five < six', 'six > five', 'Use &nbsp;', 'Use &amp;nbsp;A "solution"', "skop 'n bal", '"""', "'''", '\n', '\t', '\r', 'Escape at end \\', '', '\\n', '\\t', '\\r', '\\"', '\r\n', '\\r\\n', '\\', "Completed %S", "&blockAttackSites;", "&#x00A0;", "&intro-point2-a;", "&basePBMenu.label;", #"Don't buy", #"Don't \"buy\"", "A \"thing\"", "<a href=\"http" ] for special in specials: quoted_special = dtd.quotefordtd(special) unquoted_special = dtd.unquotefromdtd(quoted_special) print("special: %r\nquoted: %r\nunquoted: %r\n" % (special, quoted_special, unquoted_special)) assert special == unquoted_special @mark.xfail(reason="Not Implemented") def test_quotefordtd_unimplemented_cases(): """Test unimplemented quoting DTD cases.""" assert dtd.quotefordtd("Between <p> and </p>") == ('"Between &lt;p&gt; and' ' &lt;/p&gt;"') def test_quotefordtd(): """Test quoting DTD definitions""" assert dtd.quotefordtd('') == '""' assert dtd.quotefordtd("") == '""' assert dtd.quotefordtd("Completed %S") == '"Completed &#037;S"' assert dtd.quotefordtd("&blockAttackSites;") == '"&blockAttackSites;"' assert dtd.quotefordtd("&#x00A0;") == '"&#x00A0;"' assert dtd.quotefordtd("&intro-point2-a;") == '"&intro-point2-a;"' assert dtd.quotefordtd("&basePBMenu.label;") == '"&basePBMenu.label;"' # The ' character isn't escaped as &apos; since the " char isn't present. assert dtd.quotefordtd("Don't buy") == '"Don\'t buy"' # The ' character is escaped as &apos; because the " character is present. assert dtd.quotefordtd("Don't \"buy\"") == '"Don&apos;t &quot;buy&quot;"' assert dtd.quotefordtd("A \"thing\"") == '"A &quot;thing&quot;"' # The " character is not escaped when it indicates an attribute value. 
assert dtd.quotefordtd("<a href=\"http") == "'<a href=\"http'" # &amp; assert dtd.quotefordtd("Color & Light") == '"Color &amp; Light"' assert dtd.quotefordtd("Color & &block;") == '"Color &amp; &block;"' assert dtd.quotefordtd("Color&Light &red;") == '"Color&amp;Light &red;"' assert dtd.quotefordtd("Color & Light; Yes") == '"Color &amp; Light; Yes"' @mark.xfail(reason="Not Implemented") def test_unquotefromdtd_unimplemented_cases(): """Test unimplemented unquoting DTD cases.""" assert dtd.unquotefromdtd('"&lt;p&gt; and &lt;/p&gt;"') == "<p> and </p>" def test_unquotefromdtd(): """Test unquoting DTD definitions""" # % assert dtd.unquotefromdtd('"Completed &#037;S"') == "Completed %S" assert dtd.unquotefromdtd('"Completed &#37;S"') == "Completed %S" assert dtd.unquotefromdtd('"Completed &#x25;S"') == "Completed %S" # &entity; assert dtd.unquotefromdtd('"Color&light &block;"') == "Color&light &block;" assert dtd.unquotefromdtd('"Color & Light; Red"') == "Color & Light; Red" assert dtd.unquotefromdtd('"&blockAttackSites;"') == "&blockAttackSites;" assert dtd.unquotefromdtd('"&intro-point2-a;"') == "&intro-point2-a;" assert dtd.unquotefromdtd('"&basePBMenu.label"') == "&basePBMenu.label" # &amp; assert dtd.unquotefromdtd('"Color &amp; Light"') == "Color & Light" assert dtd.unquotefromdtd('"Color &amp; &block;"') == "Color & &block;" # nbsp assert dtd.unquotefromdtd('"&#x00A0;"') == "&#x00A0;" # ' assert dtd.unquotefromdtd("'Don&apos;t buy'") == "Don't buy" # " assert dtd.unquotefromdtd("'Don&apos;t &quot;buy&quot;'") == 'Don\'t "buy"' assert dtd.unquotefromdtd('"A &quot;thing&quot;"') == "A \"thing\"" assert dtd.unquotefromdtd('"A &#x0022;thing&#x0022;"') == "A \"thing\"" assert dtd.unquotefromdtd("'<a href=\"http'") == "<a href=\"http" # other chars assert dtd.unquotefromdtd('"&#187;"') == u"»" def test_android_roundtrip_quoting(): specials = [ "don't", 'the "thing"' ] for special in specials: quoted_special = dtd.quoteforandroid(special) unquoted_special = dtd.unquotefromandroid(quoted_special) print("special: %r\nquoted: %r\nunquoted: %r\n" % (special, quoted_special, unquoted_special)) assert special == unquoted_special def test_quoteforandroid(): """Test quoting Android DTD definitions.""" assert dtd.quoteforandroid("don't") == r'"don\u0027t"' assert dtd.quoteforandroid('the "thing"') == r'"the \&quot;thing\&quot;"' def test_unquotefromandroid(): """Test unquoting Android DTD definitions.""" assert dtd.unquotefromandroid('"Don\\&apos;t show"') == "Don't show" assert dtd.unquotefromandroid('"Don\\\'t show"') == "Don't show" assert dtd.unquotefromandroid('"Don\\u0027t show"') == "Don't show" assert dtd.unquotefromandroid('"A \\&quot;thing\\&quot;"') == "A \"thing\"" def test_removeinvalidamp(recwarn): """tests the the removeinvalidamps function""" def tester(actual, expected=None): if expected is None: expected = actual assert dtd.removeinvalidamps("test.name", actual) == expected # No errors tester("Valid &entity; included") tester("Valid &entity.name; included") tester("Valid &#1234; included") tester("Valid &entity_name;") # Errors that require & removal tester("This &amp is broken", "This amp is broken") tester("Mad & &amp &amp;", "Mad amp &amp;") dtd.removeinvalidamps("simple.warningtest", "Dimpled &Ring") assert recwarn.pop(UserWarning) class TestDTDUnit(test_monolingual.TestMonolingualUnit): UnitClass = dtd.dtdunit def test_rich_get(self): pass def test_rich_set(self): pass class TestDTD(test_monolingual.TestMonolingualStore): StoreClass = dtd.dtdfile def dtdparse(self, dtdsource): 
"""helper that parses dtd source without requiring files""" dummyfile = wStringIO.StringIO(dtdsource) dtdfile = dtd.dtdfile(dummyfile) return dtdfile def dtdregen(self, dtdsource): """helper that converts dtd source to dtdfile object and back""" return str(self.dtdparse(dtdsource)) def test_simpleentity(self): """checks that a simple dtd entity definition is parsed correctly""" dtdsource = '<!ENTITY test.me "bananas for sale">\n' dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 1 dtdunit = dtdfile.units[0] assert dtdunit.entity == "test.me" assert dtdunit.definition == '"bananas for sale"' def test_blanklines(self): """checks that blank lines don't break the parsing or regeneration""" dtdsource = '<!ENTITY test.me "bananas for sale">\n\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_simpleentity_source(self): """checks that a simple dtd entity definition can be regenerated as source""" dtdsource = '<!ENTITY test.me "">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen dtdsource = '<!ENTITY test.me "bananas for sale">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_hashcomment_source(self): """checks that a #expand comment is retained in the source""" dtdsource = '#expand <!ENTITY lang.version "__MOZILLA_LOCALE_VERSION__">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_commentclosing(self): """tests that comment closes with trailing space aren't duplicated""" dtdsource = '<!-- little comment --> \n<!ENTITY pane.title "Notifications">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_commententity(self): """check that we don't process messages in <!-- comments -->: bug 102""" dtdsource = '''<!-- commenting out until bug 38906 is fixed <!ENTITY messagesHeader.label "Messages"> -->''' dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 1 dtdunit = dtdfile.units[0] print(dtdunit) assert dtdunit.isnull() def test_newlines_in_entity(self): """tests that we can handle newlines in the entity itself""" dtdsource = '''<!ENTITY fileNotFound.longDesc " <ul> <li>Check the file name for capitalisation or other typing errors.</li> <li>Check to see if the file was moved, renamed or deleted.</li> </ul> "> ''' dtdregen = self.dtdregen(dtdsource) print(dtdregen) print(dtdsource) assert dtdsource == dtdregen def test_conflate_comments(self): """Tests that comments don't run onto the same line""" dtdsource = '<!-- test comments -->\n<!-- getting conflated -->\n<!ENTITY sample.txt "hello">\n' dtdregen = self.dtdregen(dtdsource) print(dtdsource) print(dtdregen) assert dtdsource == dtdregen def test_localisation_notes(self): """test to ensure that we retain the localisation note correctly""" dtdsource = '''<!--LOCALIZATION NOTE (publishFtp.label): Edit box appears beside this label --> <!ENTITY publishFtp.label "If publishing to a FTP site, enter the HTTP address to browse to:"> ''' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_entitityreference_in_source(self): """checks that an &entity; in the source is retained""" dtdsource = '<!ENTITY % realBrandDTD SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen #test for bug #610 def test_entitityreference_order_in_source(self): """checks that an &entity; in the source is retained""" dtdsource = '<!ENTITY % realBrandDTD SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n<!-- some comment -->\n' 
dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen # The following test is identical to the one above, except that the entity is split over two lines. # This is to ensure that a recent bug fixed in dtdunit.parse() is at least partly documented. # The essence of the bug was that after it had read "realBrandDTD", the line index is not reset # before starting to parse the next line. It would then read the next available word (sequence of # alphanum characters) in stead of SYSTEM and then get very confused by not finding an opening ' or # " in the entity, borking the parsing for threst of the file. dtdsource = '<!ENTITY % realBrandDTD\n SYSTEM "chrome://branding/locale/brand.dtd">\n%realBrandDTD;\n' # FIXME: The following line is necessary, because of dtdfile's inability to remember the spacing of # the source DTD file when converting back to DTD. dtdregen = self.dtdregen(dtdsource).replace('realBrandDTD SYSTEM', 'realBrandDTD\n SYSTEM') print(dtdsource) print(dtdregen) assert dtdsource == dtdregen @mark.xfail(reason="Not Implemented") def test_comment_following(self): """check that comments that appear after and entity are not pushed onto another line""" dtdsource = '<!ENTITY textZoomEnlargeCmd.commandkey2 "="> <!-- + is above this key on many keyboards -->' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_comment_newline_space_closing(self): """check that comments that are closed by a newline then space then --> don't break the following entries""" dtdsource = '<!-- Comment\n -->\n<!ENTITY searchFocus.commandkey "k">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen @mark.xfail(reason="Not Implemented") def test_invalid_quoting(self): """checks that invalid quoting doesn't work - quotes can't be reopened""" # TODO: we should rather raise an error dtdsource = '<!ENTITY test.me "bananas for sale""room">\n' assert dtd.unquotefromdtd(dtdsource[dtdsource.find('"'):]) == 'bananas for sale' dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 1 dtdunit = dtdfile.units[0] assert dtdunit.definition == '"bananas for sale"' assert str(dtdfile) == '<!ENTITY test.me "bananas for sale">\n' def test_missing_quotes(self, recwarn): """test that we fail graacefully when a message without quotes is found (bug #161)""" dtdsource = '<!ENTITY bad no quotes">\n<!ENTITY good "correct quotes">\n' dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 1 assert recwarn.pop(Warning) # Test for bug #68 def test_entity_escaping(self): """Test entities escaping (&amp; &quot; &lt; &gt; &apos;) (bug #68)""" dtdsource = ('<!ENTITY securityView.privacy.header "Privacy &amp; ' 'History">\n<!ENTITY rights.safebrowsing-term3 "Uncheck ' 'the options to &quot;&blockAttackSites.label;&quot; and ' '&quot;&blockWebForgeries.label;&quot;">\n<!ENTITY ' 'translate.test1 \'XML encodings don&apos;t work\'>\n' '<!ENTITY translate.test2 "In HTML the text paragraphs ' 'are enclosed between &lt;p&gt; and &lt;/p&gt; tags.">\n') dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 4 #dtdunit = dtdfile.units[0] #assert dtdunit.definition == '"Privacy &amp; History"' #assert dtdunit.target == "Privacy & History" #assert dtdunit.source == "Privacy & History" dtdunit = dtdfile.units[1] assert dtdunit.definition == ('"Uncheck the options to &quot;' '&blockAttackSites.label;&quot; and ' '&quot;&blockWebForgeries.label;&quot;"') assert dtdunit.target == ("Uncheck the options to \"" "&blockAttackSites.label;\" and \"" "&blockWebForgeries.label;\"") assert 
dtdunit.source == ("Uncheck the options to \"" "&blockAttackSites.label;\" and \"" "&blockWebForgeries.label;\"") dtdunit = dtdfile.units[2] assert dtdunit.definition == "'XML encodings don&apos;t work'" assert dtdunit.target == "XML encodings don\'t work" assert dtdunit.source == "XML encodings don\'t work" #dtdunit = dtdfile.units[3] #assert dtdunit.definition == ('"In HTML the text paragraphs are ' # 'enclosed between &lt;p&gt; and &lt;/p' # '&gt; tags."') #assert dtdunit.target == ("In HTML the text paragraphs are enclosed " # "between <p> and </p> tags.") #assert dtdunit.source == ("In HTML the text paragraphs are enclosed " # "between <p> and </p> tags.") # Test for bug #68 def test_entity_escaping_roundtrip(self): """Test entities escaping roundtrip (&amp; &quot; ...) (bug #68)""" dtdsource = ('<!ENTITY securityView.privacy.header "Privacy &amp; ' 'History">\n<!ENTITY rights.safebrowsing-term3 "Uncheck ' 'the options to &quot;&blockAttackSites.label;&quot; and ' '&quot;&blockWebForgeries.label;&quot;">\n<!ENTITY ' 'translate.test1 \'XML encodings don&apos;t work\'>\n' '<!ENTITY translate.test2 "In HTML the text paragraphs ' 'are enclosed between &lt;p&gt; and &lt;/p&gt; tags.">\n') dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen class TestAndroidDTD(test_monolingual.TestMonolingualStore): StoreClass = dtd.dtdfile def dtdparse(self, dtdsource): """Parses an Android DTD source string and returns a DTD store. This allows to simulate reading from Android DTD files without really having real Android DTD files. """ dummyfile = wStringIO.StringIO(dtdsource) dtdfile = dtd.dtdfile(dummyfile, android=True) return dtdfile def dtdregen(self, dtdsource): """Parses an Android DTD string to DTD store and then converts it back. This allows to simulate reading from an Android DTD file to an in-memory store and writing back to an Android DTD file without really having a real file. """ return str(self.dtdparse(dtdsource)) # Test for bug #2480 def test_android_single_quote_escape(self): """Checks several single quote unescaping cases in Android DTD. See bug #2480. """ dtdsource = ('<!ENTITY pref_char_encoding_off "Don\\\'t show menu">\n' '<!ENTITY sync.nodevice.label \'Don\\&apos;t show\'>\n' '<!ENTITY sync.nodevice.label "Don\\u0027t show">\n') dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 3 dtdunit = dtdfile.units[0] assert dtdunit.definition == '"Don\\\'t show menu"' assert dtdunit.target == "Don't show menu" assert dtdunit.source == "Don't show menu" dtdunit = dtdfile.units[1] assert dtdunit.definition == "'Don\\&apos;t show'" assert dtdunit.target == "Don't show" assert dtdunit.source == "Don't show" dtdunit = dtdfile.units[2] assert dtdunit.definition == '"Don\\u0027t show"' assert dtdunit.target == "Don't show" assert dtdunit.source == "Don't show" # Test for bug #2480 def test_android_single_quote_escape_parse_and_convert_back(self): """Checks that Android DTD don't change after parse and convert back. An Android DTD source string with several single quote escapes is used instead of real files. See bug #2480. 
""" dtdsource = ('<!ENTITY pref_char_encoding_off "Don\\\'t show menu">\n' '<!ENTITY sync.nodevice.label \'Don\\&apos;t show\'>\n' '<!ENTITY sync.nodevice.label "Don\\u0027t show">\n') dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen def test_android_double_quote_escape(self): """Checks double quote unescaping in Android DTD.""" dtdsource = '<!ENTITY translate.test "A \\&quot;thing\\&quot;">\n' dtdfile = self.dtdparse(dtdsource) assert len(dtdfile.units) == 1 dtdunit = dtdfile.units[0] assert dtdunit.definition == '"A \\&quot;thing\\&quot;"' assert dtdunit.target == "A \"thing\"" assert dtdunit.source == "A \"thing\"" def test_android_double_quote_escape_parse_and_convert_back(self): """Checks that Android DTD don't change after parse and convert back. An Android DTD source string with double quote escapes is used instead of real files. """ dtdsource = '<!ENTITY translate.test "A \\&quot;thing\\&quot;">\n' dtdregen = self.dtdregen(dtdsource) assert dtdsource == dtdregen
bluemini/kuma
vendor/packages/translate/storage/test_dtd.py
Python
mpl-2.0
20,624
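The round trips being tested above can be reproduced directly against the translate toolkit's dtd module; the values below are lifted from the assertions in test_quotefordtd() and test_unquotefromdtd():

from translate.storage import dtd

assert dtd.quotefordtd("Color & Light") == '"Color &amp; Light"'
assert dtd.unquotefromdtd('"Color &amp; Light"') == "Color & Light"
assert dtd.unquotefromdtd(dtd.quotefordtd('A "thing"')) == 'A "thing"'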
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Generate some standard test data for debugging TensorBoard. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import bisect import math import os import os.path import random import shutil import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf tf.flags.DEFINE_string("target", None, """The directoy where serialized data will be written""") tf.flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite TARGET if it already exists.""") FLAGS = tf.flags.FLAGS # Hardcode a start time and reseed so script always generates the same data. _start_time = 0 random.seed(0) def _MakeHistogramBuckets(): v = 1E-12 buckets = [] neg_buckets = [] while v < 1E20: buckets.append(v) neg_buckets.append(-v) v *= 1.1 # Should include DBL_MAX, but won't bother for test data. return neg_buckets[::-1] + [0] + buckets def _MakeHistogram(values): """Convert values into a histogram proto using logic from histogram.cc.""" limits = _MakeHistogramBuckets() counts = [0] * len(limits) for v in values: idx = bisect.bisect_left(limits, v) counts[idx] += 1 limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits)) if counts[i]] bucket_limit = [lc[0] for lc in limit_counts] bucket = [lc[1] for lc in limit_counts] sum_sq = sum(v * v for v in values) return tf.HistogramProto(min=min(values), max=max(values), num=len(values), sum=sum(values), sum_squares=sum_sq, bucket_limit=bucket_limit, bucket=bucket) def WriteScalarSeries(writer, tag, f, n=5): """Write a series of scalar events to writer, using f to create values.""" step = 0 wall_time = _start_time for i in xrange(n): v = f(i) value = tf.Summary.Value(tag=tag, simple_value=v) summary = tf.Summary(value=[value]) event = tf.Event(wall_time=wall_time, step=step, summary=summary) writer.add_event(event) step += 1 wall_time += 10 def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20): """Write a sequence of normally distributed histograms to writer.""" step = 0 wall_time = _start_time for [mean, stddev] in mu_sigma_tuples: data = [random.normalvariate(mean, stddev) for _ in xrange(n)] histo = _MakeHistogram(data) summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)]) event = tf.Event(wall_time=wall_time, step=step, summary=summary) writer.add_event(event) step += 10 wall_time += 100 def WriteImageSeries(writer, tag, n_images=1): """Write a few dummy images to writer.""" step = 0 session = tf.Session() p = tf.placeholder("uint8", (1, 4, 4, 3)) s = tf.image_summary(tag, p) for _ in xrange(n_images): im = np.random.random_integers(0, 255, (1, 4, 4, 3)) summ = session.run(s, feed_dict={p: im}) writer.add_summary(summ, step) step += 20 session.close() def WriteAudioSeries(writer, tag, n_audio=1): """Write a few dummy audio clips to writer.""" step = 0 
session = tf.Session() min_frequency_hz = 440 max_frequency_hz = 880 sample_rate = 4000 duration_frames = sample_rate * 0.5 # 0.5 seconds. frequencies_per_run = 1 num_channels = 2 p = tf.placeholder("float32", (frequencies_per_run, duration_frames, num_channels)) s = tf.audio_summary(tag, p, sample_rate) for _ in xrange(n_audio): # Generate a different frequency for each channel to show stereo works. frequencies = np.random.random_integers( min_frequency_hz, max_frequency_hz, size=(frequencies_per_run, num_channels)) tiled_frequencies = np.tile(frequencies, (1, duration_frames)) tiled_increments = np.tile( np.arange(0, duration_frames), (num_channels, 1)).T.reshape( 1, duration_frames * num_channels) tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments / sample_rate) tones = tones.reshape(frequencies_per_run, duration_frames, num_channels) summ = session.run(s, feed_dict={p: tones}) writer.add_summary(summ, step) step += 20 session.close() def GenerateTestData(path): """Generates the test data directory.""" run1_path = os.path.join(path, "run1") os.makedirs(run1_path) writer1 = tf.train.SummaryWriter(run1_path) WriteScalarSeries(writer1, "foo/square", lambda x: x * x) WriteScalarSeries(writer1, "bar/square", lambda x: x * x) WriteScalarSeries(writer1, "foo/sin", math.sin) WriteScalarSeries(writer1, "foo/cos", math.cos) WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1], [1, 1]]) WriteImageSeries(writer1, "im1") WriteImageSeries(writer1, "im2") WriteAudioSeries(writer1, "au1") run2_path = os.path.join(path, "run2") os.makedirs(run2_path) writer2 = tf.train.SummaryWriter(run2_path) WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2) WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3) WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2) WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2], [1, 2]]) WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1], [1, 1]]) WriteImageSeries(writer2, "im1") WriteAudioSeries(writer2, "au2") graph_def = tf.GraphDef() node1 = graph_def.node.add() node1.name = "a" node1.op = "matmul" node2 = graph_def.node.add() node2.name = "b" node2.op = "matmul" node2.input.extend(["a:0"]) writer1.add_graph(graph_def) node3 = graph_def.node.add() node3.name = "c" node3.op = "matmul" node3.input.extend(["a:0", "b:0"]) writer2.add_graph(graph_def) writer1.close() writer2.close() def main(unused_argv=None): target = FLAGS.target if not target: print("The --target flag is required.") return -1 if os.path.exists(target): if FLAGS.overwrite: if os.path.isdir(target): shutil.rmtree(target) else: os.remove(target) else: print("Refusing to overwrite target %s without --overwrite" % target) return -2 GenerateTestData(target) if __name__ == "__main__": tf.app.run()
cg31/tensorflow
tensorflow/tensorboard/scripts/generate_testdata.py
Python
apache-2.0
7,184
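_MakeHistogram() above buckets samples with bisect over geometrically growing limits (each 1.1x the previous). The same idea in miniature, without the negative buckets or the HistogramProto packaging:

import bisect

limits = [1e-12]
while limits[-1] < 10.0:
    limits.append(limits[-1] * 1.1)              # geometric bucket edges, as above

values = [0.5, 2.0, 9.9]
counts = [0] * len(limits)
for v in values:
    counts[bisect.bisect_left(limits, v)] += 1   # first edge >= v

assert sum(counts) == len(values)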
#!/usr/bin/env python # # Copyright 2009 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Escaping/unescaping methods for HTML, JSON, URLs, and others. Also includes a few other miscellaneous string manipulation functions that have crept in over time. """ from __future__ import absolute_import, division, print_function import json import re from tornado.util import PY3, unicode_type, basestring_type if PY3: from urllib.parse import parse_qs as _parse_qs import html.entities as htmlentitydefs import urllib.parse as urllib_parse unichr = chr else: from urlparse import parse_qs as _parse_qs import htmlentitydefs import urllib as urllib_parse try: import typing # noqa except ImportError: pass _XHTML_ESCAPE_RE = re.compile('[&<>"\']') _XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', '\'': '&#39;'} def xhtml_escape(value): """Escapes a string so it is valid within HTML or XML. Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``. When used in attribute values the escaped strings must be enclosed in quotes. .. versionchanged:: 3.2 Added the single quote to the list of escaped characters. """ return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value)) def xhtml_unescape(value): """Un-escapes an XML-escaped string.""" return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value)) # The fact that json_encode wraps json.dumps is an implementation detail. # Please see https://github.com/tornadoweb/tornado/pull/706 # before sending a pull request that adds **kwargs to this function. def json_encode(value): """JSON-encodes the given Python object.""" # JSON permits but does not require forward slashes to be escaped. # This is useful when json data is emitted in a <script> tag # in HTML, as it prevents </script> tags from prematurely terminating # the javascript. Some json libraries do this escaping by default, # although python's standard library does not, so we do it here. # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped return json.dumps(value).replace("</", "<\\/") def json_decode(value): """Returns Python objects for the given JSON string.""" return json.loads(to_basestring(value)) def squeeze(value): """Replace all sequences of whitespace chars with a single space.""" return re.sub(r"[\x00-\x20]+", " ", value).strip() def url_escape(value, plus=True): """Returns a URL-encoded version of the given value. If ``plus`` is true (the default), spaces will be represented as "+" instead of "%20". This is appropriate for query strings but not for the path component of a URL. Note that this default is the reverse of Python's urllib module. .. versionadded:: 3.1 The ``plus`` argument """ quote = urllib_parse.quote_plus if plus else urllib_parse.quote return quote(utf8(value)) # python 3 changed things around enough that we need two separate # implementations of url_unescape. We also need our own implementation # of parse_qs since python 3's version insists on decoding everything. 
if not PY3: def url_unescape(value, encoding='utf-8', plus=True): """Decodes the given value from a URL. The argument may be either a byte or unicode string. If encoding is None, the result will be a byte string. Otherwise, the result is a unicode string in the specified encoding. If ``plus`` is true (the default), plus signs will be interpreted as spaces (literal plus signs must be represented as "%2B"). This is appropriate for query strings and form-encoded values but not for the path component of a URL. Note that this default is the reverse of Python's urllib module. .. versionadded:: 3.1 The ``plus`` argument """ unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote) if encoding is None: return unquote(utf8(value)) else: return unicode_type(unquote(utf8(value)), encoding) parse_qs_bytes = _parse_qs else: def url_unescape(value, encoding='utf-8', plus=True): """Decodes the given value from a URL. The argument may be either a byte or unicode string. If encoding is None, the result will be a byte string. Otherwise, the result is a unicode string in the specified encoding. If ``plus`` is true (the default), plus signs will be interpreted as spaces (literal plus signs must be represented as "%2B"). This is appropriate for query strings and form-encoded values but not for the path component of a URL. Note that this default is the reverse of Python's urllib module. .. versionadded:: 3.1 The ``plus`` argument """ if encoding is None: if plus: # unquote_to_bytes doesn't have a _plus variant value = to_basestring(value).replace('+', ' ') return urllib_parse.unquote_to_bytes(value) else: unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote) return unquote(to_basestring(value), encoding=encoding) def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False): """Parses a query string like urlparse.parse_qs, but returns the values as byte strings. Keys still become type str (interpreted as latin1 in python3!) because it's too painful to keep them as byte strings in python3 and in practice they're nearly always ascii anyway. """ # This is gross, but python3 doesn't give us another way. # Latin1 is the universal donor of character encodings. result = _parse_qs(qs, keep_blank_values, strict_parsing, encoding='latin1', errors='strict') encoded = {} for k, v in result.items(): encoded[k] = [i.encode('latin1') for i in v] return encoded _UTF8_TYPES = (bytes, type(None)) def utf8(value): # type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None] """Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. Otherwise it must be a unicode string and is encoded as utf8. """ if isinstance(value, _UTF8_TYPES): return value if not isinstance(value, unicode_type): raise TypeError( "Expected bytes, unicode, or None; got %r" % type(value) ) return value.encode("utf-8") _TO_UNICODE_TYPES = (unicode_type, type(None)) def to_unicode(value): """Converts a string argument to a unicode string. If the argument is already a unicode string or None, it is returned unchanged. Otherwise it must be a byte string and is decoded as utf8. 
""" if isinstance(value, _TO_UNICODE_TYPES): return value if not isinstance(value, bytes): raise TypeError( "Expected bytes, unicode, or None; got %r" % type(value) ) return value.decode("utf-8") # to_unicode was previously named _unicode not because it was private, # but to avoid conflicts with the built-in unicode() function/type _unicode = to_unicode # When dealing with the standard library across python 2 and 3 it is # sometimes useful to have a direct conversion to the native string type if str is unicode_type: native_str = to_unicode else: native_str = utf8 _BASESTRING_TYPES = (basestring_type, type(None)) def to_basestring(value): """Converts a string argument to a subclass of basestring. In python2, byte and unicode strings are mostly interchangeable, so functions that deal with a user-supplied argument in combination with ascii string constants can use either and should return the type the user supplied. In python3, the two types are not interchangeable, so this method is needed to convert byte strings to unicode. """ if isinstance(value, _BASESTRING_TYPES): return value if not isinstance(value, bytes): raise TypeError( "Expected bytes, unicode, or None; got %r" % type(value) ) return value.decode("utf-8") def recursive_unicode(obj): """Walks a simple data structure, converting byte strings to unicode. Supports lists, tuples, and dictionaries. """ if isinstance(obj, dict): return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items()) elif isinstance(obj, list): return list(recursive_unicode(i) for i in obj) elif isinstance(obj, tuple): return tuple(recursive_unicode(i) for i in obj) elif isinstance(obj, bytes): return to_unicode(obj) else: return obj # I originally used the regex from # http://daringfireball.net/2010/07/improved_regex_for_matching_urls # but it gets all exponential on certain patterns (such as too many trailing # dots), causing the regex matcher to never return. # This regex should avoid those problems. # Use to_unicode instead of tornado.util.u - we don't want backslashes getting # processed as escapes. _URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""")) def linkify(text, shorten=False, extra_params="", require_protocol=False, permitted_protocols=["http", "https"]): """Converts plain text into HTML with links. For example: ``linkify("Hello http://tornadoweb.org!")`` would return ``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!`` Parameters: * ``shorten``: Long urls will be shortened for display. * ``extra_params``: Extra text to include in the link tag, or a callable taking the link as an argument and returning the extra text e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``, or:: def extra_params_cb(url): if url.startswith("http://example.com"): return 'class="internal"' else: return 'class="external" rel="nofollow"' linkify(text, extra_params=extra_params_cb) * ``require_protocol``: Only linkify urls which include a protocol. If this is False, urls such as www.facebook.com will also be linkified. * ``permitted_protocols``: List (or set) of protocols which should be linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp", "mailto"])``. It is very unsafe to include protocols such as ``javascript``. 
""" if extra_params and not callable(extra_params): extra_params = " " + extra_params.strip() def make_link(m): url = m.group(1) proto = m.group(2) if require_protocol and not proto: return url # not protocol, no linkify if proto and proto not in permitted_protocols: return url # bad protocol, no linkify href = m.group(1) if not proto: href = "http://" + href # no proto specified, use http if callable(extra_params): params = " " + extra_params(href).strip() else: params = extra_params # clip long urls. max_len is just an approximation max_len = 30 if shorten and len(url) > max_len: before_clip = url if proto: proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for : else: proto_len = 0 parts = url[proto_len:].split("/") if len(parts) > 1: # Grab the whole host part plus the first bit of the path # The path is usually not that interesting once shortened # (no more slug, etc), so it really just provides a little # extra indication of shortening. url = url[:proto_len] + parts[0] + "/" + \ parts[1][:8].split('?')[0].split('.')[0] if len(url) > max_len * 1.5: # still too long url = url[:max_len] if url != before_clip: amp = url.rfind('&') # avoid splitting html char entities if amp > max_len - 5: url = url[:amp] url += "..." if len(url) >= len(before_clip): url = before_clip else: # full url is visible on mouse-over (for those who don't # have a status bar, such as Safari by default) params += ' title="%s"' % href return u'<a href="%s"%s>%s</a>' % (href, params, url) # First HTML-escape so that our strings are all safe. # The regex is modified to avoid character entites other than &amp; so # that we won't pick up &quot;, etc. text = _unicode(xhtml_escape(text)) return _URL_RE.sub(make_link, text) def _convert_entity(m): if m.group(1) == "#": try: if m.group(2)[:1].lower() == 'x': return unichr(int(m.group(2)[1:], 16)) else: return unichr(int(m.group(2))) except ValueError: return "&#%s;" % m.group(2) try: return _HTML_UNICODE_MAP[m.group(2)] except KeyError: return "&%s;" % m.group(2) def _build_unicode_map(): unicode_map = {} for name, value in htmlentitydefs.name2codepoint.items(): unicode_map[name] = unichr(value) return unicode_map _HTML_UNICODE_MAP = _build_unicode_map()
unnikrishnankgs/va
venv/lib/python3.5/site-packages/tornado/escape.py
Python
bsd-2-clause
14,393
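# Illustrative usage sketch (not part of any file in this listing) for the
# tornado.escape module shown in the entry above; it assumes the tornado
# package is installed and importable. The sample strings are made up.
from tornado.escape import (json_encode, linkify, url_escape, url_unescape,
                            xhtml_escape)

print(xhtml_escape('<b>"Tom & Jerry"</b>'))   # &lt;b&gt;&quot;Tom &amp; Jerry&quot;&lt;/b&gt;
print(url_escape("a b&c"))                    # a+b%26c
print(url_unescape("a+b%26c"))                # a b&c
print(json_encode({"tag": "</script>"}))      # {"tag": "<\/script>"}
print(linkify("Docs live at http://www.tornadoweb.org"))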
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe, re from frappe.website.website_generator import WebsiteGenerator from frappe.website.render import clear_cache from frappe.utils import today, cint, global_date_format, get_fullname from frappe.website.utils import find_first_image, get_comment_list from frappe.templates.pages.blog import get_children class BlogPost(WebsiteGenerator): condition_field = "published" template = "templates/generators/blog_post.html" save_versions = True order_by = "published_on desc" parent_website_route_field = "blog_category" page_title_field = "title" def validate(self): super(BlogPost, self).validate() if not self.blog_intro: self.blog_intro = self.content[:140] self.blog_intro = re.sub("\<[^>]*\>", "", self.blog_intro) if self.blog_intro: self.blog_intro = self.blog_intro[:140] if self.published and not self.published_on: self.published_on = today() # update posts frappe.db.sql("""update tabBlogger set posts=(select count(*) from `tabBlog Post` where ifnull(blogger,'')=tabBlogger.name) where name=%s""", (self.blogger,)) def on_update(self): WebsiteGenerator.on_update(self) clear_cache("writers") def get_context(self, context): # this is for double precaution. usually it wont reach this code if not published if not cint(self.published): raise Exception, "This blog has not been published yet!" # temp fields context.full_name = get_fullname(self.owner) context.updated = global_date_format(self.published_on) if self.blogger: context.blogger_info = frappe.get_doc("Blogger", self.blogger).as_dict() context.description = self.blog_intro or self.content[:140] context.metatags = { "name": self.title, "description": context.description, } image = find_first_image(self.content) if image: context.metatags["image"] = image context.categories = frappe.db.sql_list("""select name from `tabBlog Category` order by name""") context.comment_list = get_comment_list(self.doctype, self.name) context.children = get_children() return context def clear_blog_cache(): for blog in frappe.db.sql_list("""select page_name from `tabBlog Post` where ifnull(published,0)=1"""): clear_cache(blog) clear_cache("writers") @frappe.whitelist(allow_guest=True) def get_blog_list(start=0, by=None, category=None): condition = "" if by: condition = " and t1.blogger='%s'" % by.replace("'", "\'") if category: condition += " and t1.blog_category='%s'" % category.replace("'", "\'") query = """\ select t1.title, t1.name, concat(t1.parent_website_route, "/", t1.page_name) as page_name, t1.published_on as creation, day(t1.published_on) as day, monthname(t1.published_on) as month, year(t1.published_on) as year, ifnull(t1.blog_intro, t1.content) as content, t2.full_name, t2.avatar, t1.blogger, (select count(name) from `tabComment` where comment_doctype='Blog Post' and comment_docname=t1.name) as comments from `tabBlog Post` t1, `tabBlogger` t2 where ifnull(t1.published,0)=1 and t1.blogger = t2.name %(condition)s order by published_on desc, name asc limit %(start)s, 20""" % {"start": start, "condition": condition} result = frappe.db.sql(query, as_dict=1) # strip html tags from content for res in result: res['published'] = global_date_format(res['creation']) res['content'] = res['content'][:140] return result
gangadharkadam/shfr
frappe/website/doctype/blog_post/blog_post.py
Python
mit
3,543
class PayPalFailure(Exception):
    pass
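# Illustrative only (not part of the original file): code that talks to PayPal
# through this vendored wrapper typically catches the exception like so; the
# error message below is made up.
try:
    raise PayPalFailure("payment was declined")
except PayPalFailure as exc:
    print("PayPal call failed: %s" % exc)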
AlphaCluster/NewsBlur
vendor/paypal/pro/exceptions.py
Python
mit
36
name0_1_1_0_0_2_0 = None
name0_1_1_0_0_2_1 = None
name0_1_1_0_0_2_2 = None
name0_1_1_0_0_2_3 = None
name0_1_1_0_0_2_4 = None
siosio/intellij-community
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_0/_mod0_1_1_0_0_2.py
Python
apache-2.0
128
__version__="0.12.5"
nzsquirrell/p2pool-myriad
oldstuff/SOAPpy/version.py
Python
gpl-3.0
22
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2012-Today OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## from openerp import tools from openerp.osv import osv from openerp.osv import fields from openerp.tools.translate import _ class invite_wizard(osv.osv_memory): """ Wizard to invite partners and make them followers. """ _name = 'mail.wizard.invite' _description = 'Invite wizard' def default_get(self, cr, uid, fields, context=None): result = super(invite_wizard, self).default_get(cr, uid, fields, context=context) if 'message' in fields and result.get('res_model') and result.get('res_id'): document_name = self.pool.get(result.get('res_model')).name_get(cr, uid, [result.get('res_id')], context=context)[0][1] message = _('<div>You have been invited to follow %s.</div>') % document_name result['message'] = message elif 'message' in fields: result['message'] = _('<div>You have been invited to follow a new document.</div>') return result _columns = { 'res_model': fields.char('Related Document Model', size=128, required=True, select=1, help='Model of the followed resource'), 'res_id': fields.integer('Related Document ID', select=1, help='Id of the followed resource'), 'partner_ids': fields.many2many('res.partner', string='Partners'), 'message': fields.html('Message'), } def add_followers(self, cr, uid, ids, context=None): for wizard in self.browse(cr, uid, ids, context=context): model_obj = self.pool.get(wizard.res_model) document = model_obj.browse(cr, uid, wizard.res_id, context=context) # filter partner_ids to get the new followers, to avoid sending email to already following partners new_follower_ids = [p.id for p in wizard.partner_ids if p.id not in document.message_follower_ids] model_obj.message_subscribe(cr, uid, [wizard.res_id], new_follower_ids, context=context) # send an email only if a personal message exists if wizard.message and not wizard.message == '<br>': # when deleting the message, cleditor keeps a <br> # add signature user_id = self.pool.get("res.users").read(cr, uid, [uid], fields=["signature"], context=context)[0] signature = user_id and user_id["signature"] or '' if signature: wizard.message = tools.append_content_to_html(wizard.message, signature, plaintext=True, container_tag='div') # FIXME 8.0: use notification_email_send, send a wall message and let mail handle email notification + message box for follower_id in new_follower_ids: mail_mail = self.pool.get('mail.mail') # the invite wizard should create a private message not related to any object -> no model, no res_id mail_id = mail_mail.create(cr, uid, { 'model': wizard.res_model, 'res_id': wizard.res_id, 'subject': _('Invitation to follow %s') % document.name_get()[0][1], 'body_html': '%s' % wizard.message, 
'auto_delete': True, }, context=context) mail_mail.send(cr, uid, [mail_id], recipient_ids=[follower_id], context=context) return {'type': 'ir.actions.act_window_close'}
jaggu303619/asylum
openerp/addons/mail/wizard/invite.py
Python
agpl-3.0
4,418
from django.apps import AppConfig


class WagtailAdminAppConfig(AppConfig):
    name = 'wagtail.wagtailadmin'
    label = 'wagtailadmin'
    verbose_name = "Wagtail admin"
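# Illustrative only (not part of the original file): once 'wagtail.wagtailadmin'
# is listed in a project's INSTALLED_APPS and django.setup() has run, the
# config above can be retrieved from the app registry by its label.
from django.apps import apps


def get_wagtailadmin_verbose_name():
    return apps.get_app_config('wagtailadmin').verbose_name  # "Wagtail admin"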
WQuanfeng/wagtail
wagtail/wagtailadmin/apps.py
Python
bsd-3-clause
172
import logging

access_logger = logging.getLogger('aiohttp.access')
client_logger = logging.getLogger('aiohttp.client')
internal_logger = logging.getLogger('aiohttp.internal')
server_logger = logging.getLogger('aiohttp.server')
web_logger = logging.getLogger('aiohttp.web')
ws_logger = logging.getLogger('aiohttp.websocket')
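# Illustrative only (not part of the original module): applications usually
# attach their own handlers and levels to these loggers, for example to surface
# access logs on stderr during development.
import sys

_handler = logging.StreamHandler(sys.stderr)
_handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(message)s'))
access_logger.addHandler(_handler)
access_logger.setLevel(logging.INFO)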
arju88nair/projectCulminate
venv/lib/python3.5/site-packages/aiohttp/log.py
Python
apache-2.0
326
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This helps you preview the apps and extensions docs. # # ./preview.py --help # # There are two modes: server- and render- mode. The default is server, in which # a webserver is started on a port (default 8000). Navigating to paths on # http://localhost:8000, for example # # http://localhost:8000/extensions/tabs.html # # will render the documentation for the extension tabs API. # # On the other hand, render mode statically renders docs to stdout. Use this # to save the output (more convenient than needing to save the page in a # browser), handy when uploading the docs somewhere (e.g. for a review), # and for profiling the server. For example, # # ./preview.py -r extensions/tabs.html # # will output the documentation for the tabs API on stdout and exit immediately. # NOTE: RUN THIS FIRST. Or all third_party imports will fail. import build_server # Copy all the files necessary to run the server. These are cleaned up when the # server quits. build_server.main() from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer import logging import optparse import posixpath import time from local_renderer import LocalRenderer class _RequestHandler(BaseHTTPRequestHandler): '''A HTTPRequestHandler that outputs the docs page generated by Handler. ''' def do_GET(self): # Sanitize path to guarantee that it stays within the server. if not posixpath.abspath(self.path.lstrip('/')).startswith( posixpath.abspath('')): return # Rewrite paths that would otherwise be served from app.yaml. self.path = { '/robots.txt': '../../server2/robots.txt', '/favicon.ico': '../../server2/chrome-32.ico', '/apple-touch-icon-precomposed.png': '../../server2/chrome-128.png' }.get(self.path, self.path) response = LocalRenderer.Render(self.path, headers=dict(self.headers)) self.protocol_version = 'HTTP/1.1' self.send_response(response.status) for k, v in response.headers.iteritems(): self.send_header(k, v) self.end_headers() self.wfile.write(response.content.ToString()) if __name__ == '__main__': parser = optparse.OptionParser( description='Runs a server to preview the extension documentation.', usage='usage: %prog [option]...') parser.add_option('-a', '--address', default='127.0.0.1', help='the local interface address to bind the server to') parser.add_option('-p', '--port', default='8000', help='port to run the server on') parser.add_option('-r', '--render', default='', help='statically render a page and print to stdout rather than starting ' 'the server, e.g. apps/storage.html. The path may optionally end ' 'with #n where n is the number of times to render the page before ' 'printing it, e.g. apps/storage.html#50, to use for profiling.') parser.add_option('-s', '--stat', help='Print profile stats at the end of the run using the given ' 'profiling option (like "tottime"). 
-t is ignored if this is set.') parser.add_option('-t', '--time', action='store_true', help='Print the time taken rendering rather than the result.') (opts, argv) = parser.parse_args() if opts.render: if opts.render.find('#') >= 0: (path, iterations) = opts.render.rsplit('#', 1) extra_iterations = int(iterations) - 1 else: path = opts.render extra_iterations = 0 if opts.stat: import cProfile, pstats, StringIO pr = cProfile.Profile() pr.enable() elif opts.time: start_time = time.time() response = LocalRenderer.Render(path) if response.status != 200: print('Error status: %s' % response.status) exit(1) for _ in range(extra_iterations): LocalRenderer.Render(path) if opts.stat: pr.disable() s = StringIO.StringIO() pstats.Stats(pr, stream=s).sort_stats(opts.stat).print_stats() print(s.getvalue()) elif opts.time: print('Took %s seconds' % (time.time() - start_time)) else: print(response.content.ToString()) exit() print('Starting previewserver on port %s' % opts.port) print('') print('The extension documentation can be found at:') print('') print(' http://localhost:%s/extensions/' % opts.port) print('') print('The apps documentation can be found at:') print('') print(' http://localhost:%s/apps/' % opts.port) print('') logging.getLogger().setLevel(logging.INFO) server = HTTPServer((opts.address, int(opts.port)), _RequestHandler) try: server.serve_forever() finally: server.socket.close()
ds-hwang/chromium-crosswalk
chrome/common/extensions/docs/server2/preview.py
Python
bsd-3-clause
4,795
# -*- coding: utf-8 -*- import os import httplib import logging import functools from modularodm.exceptions import ValidationValueError from framework.exceptions import HTTPError from framework.analytics import update_counter from website.addons.osfstorage import settings logger = logging.getLogger(__name__) LOCATION_KEYS = ['service', settings.WATERBUTLER_RESOURCE, 'object'] def update_analytics(node, file_id, version_idx): """ :param Node node: Root node to update :param str file_id: The _id field of a filenode :param int version_idx: Zero-based version index """ update_counter(u'download:{0}:{1}'.format(node._id, file_id)) update_counter(u'download:{0}:{1}:{2}'.format(node._id, file_id, version_idx)) def serialize_revision(node, record, version, index, anon=False): """Serialize revision for use in revisions table. :param Node node: Root node :param FileRecord record: Root file record :param FileVersion version: The version to serialize :param int index: One-based index of version """ if anon: user = None else: user = { 'name': version.creator.fullname, 'url': version.creator.url, } return { 'user': user, 'index': index + 1, 'date': version.date_created.isoformat(), 'downloads': record.get_download_count(version=index), 'md5': version.metadata.get('md5'), 'sha256': version.metadata.get('sha256'), } SIGNED_REQUEST_ERROR = HTTPError( httplib.SERVICE_UNAVAILABLE, data={ 'message_short': 'Upload service unavailable', 'message_long': ( 'Upload service is not available; please retry ' 'your upload in a moment' ), }, ) def get_filename(version_idx, file_version, file_record): """Build name for downloaded file, appending version date if not latest. :param int version_idx: One-based version index :param FileVersion file_version: Version to name :param FileRecord file_record: Root file object """ if version_idx == len(file_record.versions): return file_record.name name, ext = os.path.splitext(file_record.name) return u'{name}-{date}{ext}'.format( name=name, date=file_version.date_created.isoformat(), ext=ext, ) def validate_location(value): for key in LOCATION_KEYS: if key not in value: raise ValidationValueError def must_be(_type): """A small decorator factory for OsfStorageFileNode. Acts as a poor mans polymorphic inheritance, ensures that the given instance is of "kind" folder or file """ def _must_be(func): @functools.wraps(func) def wrapped(self, *args, **kwargs): if not self.kind == _type: raise ValueError('This instance is not a {}'.format(_type)) return func(self, *args, **kwargs) return wrapped return _must_be def copy_files(src, target_settings, parent=None, name=None): """Copy the files from src to the target nodesettings :param OsfStorageFileNode src: The source to copy children from :param OsfStorageNodeSettings target_settings: The node settings of the project to copy files to :param OsfStorageFileNode parent: The parent of to attach the clone of src to, if applicable """ cloned = src.clone() cloned.parent = parent cloned.name = name or cloned.name cloned.node_settings = target_settings if src.is_file: cloned.versions = src.versions cloned.save() if src.is_folder: for child in src.children: copy_files(child, target_settings, parent=cloned) return cloned
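# Illustrative only (not part of the original module): a hypothetical node-like
# class showing how the must_be() decorator factory defined above guards
# folder-only methods.
class _FakeNode(object):
    def __init__(self, kind):
        self.kind = kind  # 'file' or 'folder', mirroring OsfStorageFileNode

    @must_be('folder')
    def children(self):
        return []


_FakeNode('folder').children()    # returns []
# _FakeNode('file').children()    # would raise ValueError('This instance is not a folder')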
ticklemepierce/osf.io
website/addons/osfstorage/utils.py
Python
apache-2.0
3,726
#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' module: sf_account_manager short_description: Manage SolidFire accounts extends_documentation_fragment: - netapp.solidfire version_added: '2.3' author: Sumit Kumar ([email protected]) description: - Create, destroy, or update accounts on SolidFire options: state: description: - Whether the specified account should exist or not. required: true choices: ['present', 'absent'] name: description: - Unique username for this account. (May be 1 to 64 characters in length). required: true new_name: description: - New name for the user account. required: false default: None initiator_secret: description: - CHAP secret to use for the initiator. Should be 12-16 characters long and impenetrable. - The CHAP initiator secrets must be unique and cannot be the same as the target CHAP secret. - If not specified, a random secret is created. required: false target_secret: description: - CHAP secret to use for the target (mutual CHAP authentication). - Should be 12-16 characters long and impenetrable. - The CHAP target secrets must be unique and cannot be the same as the initiator CHAP secret. - If not specified, a random secret is created. required: false attributes: description: List of Name/Value pairs in JSON object format. required: false account_id: description: - The ID of the account to manage or update. required: false default: None status: description: - Status of the account. required: false ''' EXAMPLES = """ - name: Create Account sf_account_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: TenantA - name: Modify Account sf_account_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: TenantA new_name: TenantA-Renamed - name: Delete Account sf_account_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: absent name: TenantA-Renamed """ RETURN = """ """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_SF_SDK = netapp_utils.has_sf_sdk() class SolidFireAccount(object): def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), account_id=dict(required=False, type='int', default=None), new_name=dict(required=False, type='str', default=None), initiator_secret=dict(required=False, type='str'), target_secret=dict(required=False, type='str'), attributes=dict(required=False, type='dict'), status=dict(required=False, type='str'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] self.account_id = p['account_id'] self.new_name = p['new_name'] self.initiator_secret = p['initiator_secret'] self.target_secret = p['target_secret'] self.attributes = p['attributes'] self.status = p['status'] if HAS_SF_SDK is False: 
self.module.fail_json(msg="Unable to import the SolidFire Python SDK") else: self.sfe = netapp_utils.create_sf_connection(module=self.module) def get_account(self): """ Return account object if found :return: Details about the account. None if not found. :rtype: dict """ account_list = self.sfe.list_accounts() for account in account_list.accounts: if account.username == self.name: # Update self.account_id: if self.account_id is not None: if account.account_id == self.account_id: return account else: self.account_id = account.account_id return account return None def create_account(self): try: self.sfe.add_account(username=self.name, initiator_secret=self.initiator_secret, target_secret=self.target_secret, attributes=self.attributes) except Exception as e: self.module.fail_json(msg='Error creating account %s: %s)' % (self.name, to_native(e)), exception=traceback.format_exc()) def delete_account(self): try: self.sfe.remove_account(account_id=self.account_id) except Exception as e: self.module.fail_json(msg='Error deleting account %s: %s' % (self.account_id, to_native(e)), exception=traceback.format_exc()) def update_account(self): try: self.sfe.modify_account(account_id=self.account_id, username=self.new_name, status=self.status, initiator_secret=self.initiator_secret, target_secret=self.target_secret, attributes=self.attributes) except Exception as e: self.module.fail_json(msg='Error updating account %s: %s' % (self.account_id, to_native(e)), exception=traceback.format_exc()) def apply(self): changed = False account_exists = False update_account = False account_detail = self.get_account() if account_detail: account_exists = True if self.state == 'absent': changed = True elif self.state == 'present': # Check if we need to update the account if account_detail.username is not None and self.new_name is not None and \ account_detail.username != self.new_name: update_account = True changed = True elif account_detail.status is not None and self.status is not None \ and account_detail.status != self.status: update_account = True changed = True elif account_detail.initiator_secret is not None and self.initiator_secret is not None \ and account_detail.initiator_secret != self.initiator_secret: update_account = True changed = True elif account_detail.target_secret is not None and self.target_secret is not None \ and account_detail.target_secret != self.target_secret: update_account = True changed = True elif account_detail.attributes is not None and self.attributes is not None \ and account_detail.attributes != self.attributes: update_account = True changed = True else: if self.state == 'present': changed = True if changed: if self.module.check_mode: pass else: if self.state == 'present': if not account_exists: self.create_account() elif update_account: self.update_account() elif self.state == 'absent': self.delete_account() self.module.exit_json(changed=changed) def main(): v = SolidFireAccount() v.apply() if __name__ == '__main__': main()
tsdmgz/ansible
lib/ansible/modules/storage/netapp/sf_account_manager.py
Python
gpl-3.0
8,755
#!/usr/bin/python

# Copyright (C) International Business Machines Corp., 2005
# Author: Woody Marvel <[email protected]>
# Li Ge <[email protected]>

# Positive Test:
# Test Description:
# 1. Create a domain
# 2. Destroy the domain by id

import sys
import re
import time

from XmTestLib import *

# Create a domain (default XmTestDomain, with our ramdisk)
domain = XmTestDomain()

# Start it
try:
    domain.start(noConsole=True)
except DomainError, e:
    if verbose:
        print "Failed to create test domain because:"
        print e.extra
    FAIL(str(e))

# destroy domain - positive test
status, output = traceCommand("xm destroy %s" % domain.getId())

if status != 0:
    FAIL("xm destroy returned invalid %i != 0" % status)
flexiant/xen
tools/xm-test/tests/destroy/05_destroy_byid_pos.py
Python
gpl-2.0
735
import os import re import subprocess import sys import urlparse from wptrunner.update.sync import LoadManifest from wptrunner.update.tree import get_unique_name from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean from .tree import Commit, GitTree, Patch import github from .github import GitHub def rewrite_patch(patch, strip_dir): """Take a Patch and convert to a different repository by stripping a prefix from the file paths. Also rewrite the message to remove the bug number and reviewer, but add a bugzilla link in the summary. :param patch: the Patch to convert :param strip_dir: the path prefix to remove """ if not strip_dir.startswith("/"): strip_dir = "/%s"% strip_dir new_diff = [] line_starts = ["diff ", "+++ ", "--- "] for line in patch.diff.split("\n"): for start in line_starts: if line.startswith(start): new_diff.append(line.replace(strip_dir, "").encode("utf8")) break else: new_diff.append(line) new_diff = "\n".join(new_diff) assert new_diff != patch return Patch(patch.author, patch.email, rewrite_message(patch), new_diff) def rewrite_message(patch): rest = patch.message.body if patch.message.bug is not None: return "\n".join([patch.message.summary, patch.message.body, "", "Upstreamed from https://bugzilla.mozilla.org/show_bug.cgi?id=%s" % patch.message.bug]) return "\n".join([patch.message.full_summary, rest]) class SyncToUpstream(Step): """Sync local changes to upstream""" def create(self, state): if not state.kwargs["upstream"]: return if not isinstance(state.local_tree, GitTree): self.logger.error("Cannot sync with upstream from a non-Git checkout.") return exit_clean try: import requests except ImportError: self.logger.error("Upstream sync requires the requests module to be installed") return exit_clean if not state.sync_tree: os.makedirs(state.sync["path"]) state.sync_tree = GitTree(root=state.sync["path"]) kwargs = state.kwargs with state.push(["local_tree", "sync_tree", "tests_path", "metadata_path", "sync"]): state.token = kwargs["token"] runner = SyncToUpstreamRunner(self.logger, state) runner.run() class CheckoutBranch(Step): """Create a branch in the sync tree pointing at the last upstream sync commit and check it out""" provides = ["branch"] def create(self, state): self.logger.info("Updating sync tree from %s" % state.sync["remote_url"]) state.branch = state.sync_tree.unique_branch_name( "outbound_update_%s" % state.test_manifest.rev) state.sync_tree.update(state.sync["remote_url"], state.sync["branch"], state.branch) state.sync_tree.checkout(state.test_manifest.rev, state.branch, force=True) class GetLastSyncCommit(Step): """Find the gecko commit at which we last performed a sync with upstream.""" provides = ["last_sync_path", "last_sync_commit"] def create(self, state): self.logger.info("Looking for last sync commit") state.last_sync_path = os.path.join(state.metadata_path, "mozilla-sync") with open(state.last_sync_path) as f: last_sync_sha1 = f.read().strip() state.last_sync_commit = Commit(state.local_tree, last_sync_sha1) if not state.local_tree.contains_commit(state.last_sync_commit): self.logger.error("Could not find last sync commit %s" % last_sync_sha1) return exit_clean self.logger.info("Last sync to web-platform-tests happened in %s" % state.last_sync_commit.sha1) class GetBaseCommit(Step): """Find the latest upstream commit on the branch that we are syncing with""" provides = ["base_commit"] def create(self, state): state.base_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"], state.sync["branch"]) 
self.logger.debug("New base commit is %s" % state.base_commit.sha1) class LoadCommits(Step): """Get a list of commits in the gecko tree that need to be upstreamed""" provides = ["source_commits"] def create(self, state): state.source_commits = state.local_tree.log(state.last_sync_commit, state.tests_path) update_regexp = re.compile("Bug \d+ - Update web-platform-tests to revision [0-9a-f]{40}") for i, commit in enumerate(state.source_commits[:]): if update_regexp.match(commit.message.text): # This is a previous update commit so ignore it state.source_commits.remove(commit) continue if commit.message.backouts: #TODO: Add support for collapsing backouts raise NotImplementedError("Need to get the Git->Hg commits for backouts and remove the backed out patch") if not commit.message.bug: self.logger.error("Commit %i (%s) doesn't have an associated bug number." % (i + 1, commit.sha1)) return exit_unclean self.logger.debug("Source commits: %s" % state.source_commits) class SelectCommits(Step): """Provide a UI to select which commits to upstream""" def create(self, state): if not state.source_commits: return while True: commits = state.source_commits[:] for i, commit in enumerate(commits): print "%i:\t%s" % (i, commit.message.summary) remove = raw_input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip() remove_idx = set() invalid = False for item in remove.split(" "): try: item = int(item) except: invalid = True break if item < 0 or item >= len(commits): invalid = True break remove_idx.add(item) if invalid: continue keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx] #TODO: consider printed removed commits print "Selected the following commits to keep:" for i, commit in keep_commits: print "%i:\t%s" % (i, commit.message.summary) confirm = raw_input("Keep the above commits? y/n\n").strip().lower() if confirm == "y": state.source_commits = [item[1] for item in keep_commits] break class MovePatches(Step): """Convert gecko commits into patches against upstream and commit these to the sync tree.""" provides = ["commits_loaded"] def create(self, state): state.commits_loaded = 0 strip_path = os.path.relpath(state.tests_path, state.local_tree.root) self.logger.debug("Stripping patch %s" % strip_path) for commit in state.source_commits[state.commits_loaded:]: i = state.commits_loaded + 1 self.logger.info("Moving commit %i: %s" % (i, commit.message.full_summary)) patch = commit.export_patch(state.tests_path) stripped_patch = rewrite_patch(patch, strip_path) try: state.sync_tree.import_patch(stripped_patch) except: print patch.diff raise state.commits_loaded = i class RebaseCommits(Step): """Rebase commits from the current branch on top of the upstream destination branch. This step is particularly likely to fail if the rebase generates merge conflicts. In that case the conflicts can be fixed up locally and the sync process restarted with --continue. 
""" provides = ["rebased_commits"] def create(self, state): self.logger.info("Rebasing local commits") continue_rebase = False # Check if there's a rebase in progress if (os.path.exists(os.path.join(state.sync_tree.root, ".git", "rebase-merge")) or os.path.exists(os.path.join(state.sync_tree.root, ".git", "rebase-apply"))): continue_rebase = True try: state.sync_tree.rebase(state.base_commit, continue_rebase=continue_rebase) except subprocess.CalledProcessError: self.logger.info("Rebase failed, fix merge and run %s again with --continue" % sys.argv[0]) raise state.rebased_commits = state.sync_tree.log(state.base_commit) self.logger.info("Rebase successful") class CheckRebase(Step): """Check if there are any commits remaining after rebase""" def create(self, state): if not state.rebased_commits: self.logger.info("Nothing to upstream, exiting") return exit_clean class MergeUpstream(Step): """Run steps to push local commits as seperate PRs and merge upstream.""" provides = ["merge_index", "gh_repo"] def create(self, state): gh = GitHub(state.token) if "merge_index" not in state: state.merge_index = 0 org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/") if name.endswith(".git"): name = name[:-4] state.gh_repo = gh.repo(org, name) for commit in state.rebased_commits[state.merge_index:]: with state.push(["gh_repo", "sync_tree"]): state.commit = commit pr_merger = PRMergeRunner(self.logger, state) rv = pr_merger.run() if rv is not None: return rv state.merge_index += 1 class UpdateLastSyncCommit(Step): """Update the gecko commit at which we last performed a sync with upstream.""" provides = [] def create(self, state): self.logger.info("Updating last sync commit") with open(state.last_sync_path, "w") as f: f.write(state.local_tree.rev) # This gets added to the patch later on class MergeLocalBranch(Step): """Create a local branch pointing at the commit to upstream""" provides = ["local_branch"] def create(self, state): branch_prefix = "sync_%s" % state.commit.sha1 local_branch = state.sync_tree.unique_branch_name(branch_prefix) state.sync_tree.create_branch(local_branch, state.commit) state.local_branch = local_branch class MergeRemoteBranch(Step): """Get an unused remote branch name to use for the PR""" provides = ["remote_branch"] def create(self, state): remote_branch = "sync_%s" % state.commit.sha1 branches = [ref[len("refs/heads/"):] for sha1, ref in state.sync_tree.list_remote(state.gh_repo.url) if ref.startswith("refs/heads")] state.remote_branch = get_unique_name(branches, remote_branch) class PushUpstream(Step): """Push local branch to remote""" def create(self, state): self.logger.info("Pushing commit upstream") state.sync_tree.push(state.gh_repo.url, state.local_branch, state.remote_branch) class CreatePR(Step): """Create a PR for the remote branch""" provides = ["pr"] def create(self, state): self.logger.info("Creating a PR") commit = state.commit state.pr = state.gh_repo.create_pr(commit.message.full_summary, state.remote_branch, "master", commit.message.body if commit.message.body else "") class PRAddComment(Step): """Add an issue comment indicating that the code has been reviewed already""" def create(self, state): state.pr.issue.add_comment("Code reviewed upstream.") class MergePR(Step): """Merge the PR""" def create(self, state): self.logger.info("Merging PR") state.pr.merge() class PRDeleteBranch(Step): """Delete the remote branch""" def create(self, state): self.logger.info("Deleting remote branch") state.sync_tree.push(state.gh_repo.url, "", 
state.remote_branch) class SyncToUpstreamRunner(StepRunner): """Runner for syncing local changes to upstream""" steps = [LoadManifest, CheckoutBranch, GetLastSyncCommit, GetBaseCommit, LoadCommits, SelectCommits, MovePatches, RebaseCommits, CheckRebase, MergeUpstream, UpdateLastSyncCommit] class PRMergeRunner(StepRunner): """(Sub)Runner for creating and merging a PR""" steps = [ MergeLocalBranch, MergeRemoteBranch, PushUpstream, CreatePR, PRAddComment, MergePR, PRDeleteBranch, ]
meh/servo
tests/wpt/update/upstream.py
Python
mpl-2.0
13,551
from __future__ import absolute_import, division import time import os try: unicode except NameError: unicode = str from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked class SQLiteLockFile(LockBase): "Demonstrate SQL-based locking." testdb = None def __init__(self, path, threaded=True, timeout=None): """ >>> lock = SQLiteLockFile('somefile') >>> lock = SQLiteLockFile('somefile', threaded=False) """ LockBase.__init__(self, path, threaded, timeout) self.lock_file = unicode(self.lock_file) self.unique_name = unicode(self.unique_name) if SQLiteLockFile.testdb is None: import tempfile _fd, testdb = tempfile.mkstemp() os.close(_fd) os.unlink(testdb) del _fd, tempfile SQLiteLockFile.testdb = testdb import sqlite3 self.connection = sqlite3.connect(SQLiteLockFile.testdb) c = self.connection.cursor() try: c.execute("create table locks" "(" " lock_file varchar(32)," " unique_name varchar(32)" ")") except sqlite3.OperationalError: pass else: self.connection.commit() import atexit atexit.register(os.unlink, SQLiteLockFile.testdb) def acquire(self, timeout=None): timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout if timeout is None: wait = 0.1 elif timeout <= 0: wait = 0 else: wait = timeout / 10 cursor = self.connection.cursor() while True: if not self.is_locked(): # Not locked. Try to lock it. cursor.execute("insert into locks" " (lock_file, unique_name)" " values" " (?, ?)", (self.lock_file, self.unique_name)) self.connection.commit() # Check to see if we are the only lock holder. cursor.execute("select * from locks" " where unique_name = ?", (self.unique_name,)) rows = cursor.fetchall() if len(rows) > 1: # Nope. Someone else got there. Remove our lock. cursor.execute("delete from locks" " where unique_name = ?", (self.unique_name,)) self.connection.commit() else: # Yup. We're done, so go home. return else: # Check to see if we are the only lock holder. cursor.execute("select * from locks" " where unique_name = ?", (self.unique_name,)) rows = cursor.fetchall() if len(rows) == 1: # We're the locker, so go home. return # Maybe we should wait a bit longer. if timeout is not None and time.time() > end_time: if timeout > 0: # No more waiting. raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: # Someone else has the lock and we are impatient.. raise AlreadyLocked("%s is already locked" % self.path) # Well, okay. We'll give it a bit longer. time.sleep(wait) def release(self): if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) if not self.i_am_locking(): raise NotMyLock("%s is locked, but not by me (by %s)" % (self.unique_name, self._who_is_locking())) cursor = self.connection.cursor() cursor.execute("delete from locks" " where unique_name = ?", (self.unique_name,)) self.connection.commit() def _who_is_locking(self): cursor = self.connection.cursor() cursor.execute("select unique_name from locks" " where lock_file = ?", (self.lock_file,)) return cursor.fetchone()[0] def is_locked(self): cursor = self.connection.cursor() cursor.execute("select * from locks" " where lock_file = ?", (self.lock_file,)) rows = cursor.fetchall() return not not rows def i_am_locking(self): cursor = self.connection.cursor() cursor.execute("select * from locks" " where lock_file = ?" 
" and unique_name = ?", (self.lock_file, self.unique_name)) return not not cursor.fetchall() def break_lock(self): cursor = self.connection.cursor() cursor.execute("delete from locks" " where lock_file = ?", (self.lock_file,)) self.connection.commit()
allieus/pylockfile
lockfile/sqlitelockfile.py
Python
mit
5,541
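# Illustrative usage sketch (not part of any file in this listing) for the
# SQLiteLockFile class shown in the entry above; the import paths assume the
# module lives inside the `lockfile` package, as its repository path suggests.
from lockfile import LockTimeout
from lockfile.sqlitelockfile import SQLiteLockFile


def with_lock():
    lock = SQLiteLockFile('somefile', threaded=False)
    try:
        lock.acquire(timeout=5)   # raises LockTimeout if not acquired within 5s
    except LockTimeout:
        print('could not obtain the lock')
        return
    try:
        pass                      # work with the protected resource here
    finally:
        lock.release()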
from django.db import models, DEFAULT_DB_ALIAS, connection from django.contrib.auth.models import User from django.conf import settings class Animal(models.Model): name = models.CharField(max_length=150) latin_name = models.CharField(max_length=150) count = models.IntegerField() weight = models.FloatField() # use a non-default name for the default manager specimens = models.Manager() def __unicode__(self): return self.name class Plant(models.Model): name = models.CharField(max_length=150) class Meta: # For testing when upper case letter in app name; regression for #4057 db_table = "Fixtures_regress_plant" class Stuff(models.Model): name = models.CharField(max_length=20, null=True) owner = models.ForeignKey(User, null=True) def __unicode__(self): return unicode(self.name) + u' is owned by ' + unicode(self.owner) class Absolute(models.Model): name = models.CharField(max_length=40) load_count = 0 def __init__(self, *args, **kwargs): super(Absolute, self).__init__(*args, **kwargs) Absolute.load_count += 1 class Parent(models.Model): name = models.CharField(max_length=10) class Meta: ordering = ('id',) class Child(Parent): data = models.CharField(max_length=10) # Models to regression test #7572 class Channel(models.Model): name = models.CharField(max_length=255) class Article(models.Model): title = models.CharField(max_length=255) channels = models.ManyToManyField(Channel) class Meta: ordering = ('id',) # Models to regression test #11428 class Widget(models.Model): name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __unicode__(self): return self.name class WidgetProxy(Widget): class Meta: proxy = True # Check for forward references in FKs and M2Ms with natural keys class TestManager(models.Manager): def get_by_natural_key(self, key): return self.get(name=key) class Store(models.Model): objects = TestManager() name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __unicode__(self): return self.name def natural_key(self): return (self.name,) class Person(models.Model): objects = TestManager() name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __unicode__(self): return self.name # Person doesn't actually have a dependency on store, but we need to define # one to test the behaviour of the dependency resolution algorithm. 
def natural_key(self): return (self.name,) natural_key.dependencies = ['fixtures_regress.store'] class Book(models.Model): name = models.CharField(max_length=255) author = models.ForeignKey(Person) stores = models.ManyToManyField(Store) class Meta: ordering = ('name',) def __unicode__(self): return u'%s by %s (available at %s)' % ( self.name, self.author.name, ', '.join(s.name for s in self.stores.all()) ) class NKManager(models.Manager): def get_by_natural_key(self, data): return self.get(data=data) class NKChild(Parent): data = models.CharField(max_length=10, unique=True) objects = NKManager() def natural_key(self): return self.data def __unicode__(self): return u'NKChild %s:%s' % (self.name, self.data) class RefToNKChild(models.Model): text = models.CharField(max_length=10) nk_fk = models.ForeignKey(NKChild, related_name='ref_fks') nk_m2m = models.ManyToManyField(NKChild, related_name='ref_m2ms') def __unicode__(self): return u'%s: Reference to %s [%s]' % ( self.text, self.nk_fk, ', '.join(str(o) for o in self.nk_m2m.all()) ) # ome models with pathological circular dependencies class Circle1(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle2'] class Circle2(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle1'] class Circle3(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle3'] class Circle4(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle5'] class Circle5(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle6'] class Circle6(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle4'] class ExternalDependency(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.book'] # Model for regression test of #11101 class Thingy(models.Model): name = models.CharField(max_length=255)
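# Illustrative only (not part of the original file): the natural_key() /
# get_by_natural_key() pairs above let serializers and fixtures refer to rows
# by name rather than primary key. The store name below is made up, and
# `use_natural_keys` is the keyword used by Django releases of this vintage.
from django.core import serializers


def _natural_key_demo():
    store = Store.objects.create(name='corner-shop')
    assert Store.objects.get_by_natural_key('corner-shop') == store
    return serializers.serialize('json', [store], use_natural_keys=True)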
mzdaniel/oh-mainline
vendor/packages/Django/tests/regressiontests/fixtures_regress/models.py
Python
agpl-3.0
5,411
def func(a1):
    """
    Parameters:
      a1 (:class:`MyClass`): used to call :def:`my_function` and access :attr:`my_attr`

    Raises:
      :class:`MyException`: thrown in case of any error
    """
asedunov/intellij-community
python/testData/docstrings/typeReferences.py
Python
apache-2.0
206
"""Tests for parabolic cylinder functions.

"""
import numpy as np
from numpy.testing import assert_allclose, assert_equal

import scipy.special as sc


def test_pbwa_segfault():
    # Regression test for https://github.com/scipy/scipy/issues/6208.
    #
    # Data generated by mpmath.
    #
    w = 1.02276567211316867161
    wp = -0.48887053372346189882
    assert_allclose(sc.pbwa(0, 0), (w, wp), rtol=1e-13, atol=0)


def test_pbwa_nan():
    # Check that NaN's are returned outside of the range in which the
    # implementation is accurate.
    pts = [(-6, -6), (-6, 6), (6, -6), (6, 6)]
    for p in pts:
        assert_equal(sc.pbwa(*p), (np.nan, np.nan))
mbayon/TFG-MachineLearning
venv/lib/python3.6/site-packages/scipy/special/tests/test_pcf.py
Python
mit
664
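# Illustrative only (not part of any file in this listing): pbwa(a, x) returns
# the parabolic cylinder function W(a, x) together with its derivative, and
# yields NaN outside the parameter range where the implementation is accurate,
# as the tests in the entry above check.
import scipy.special as sc


def _pbwa_demo():
    w, wp = sc.pbwa(0.5, 1.0)   # value and derivative at (a, x) = (0.5, 1.0)
    print(w, wp)
    print(sc.pbwa(6, 6))        # outside the accurate range -> (nan, nan)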
#!/usr/bin/env python # # Copyright 2008 Jose Fonseca # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """Generate a dot graph from the output of several profilers.""" __author__ = "Jose Fonseca" __version__ = "1.0" import sys import math import os.path import re import textwrap import optparse try: # Debugging helper module import debug except ImportError: pass def percentage(p): return "%.02f%%" % (p*100.0,) def add(a, b): return a + b def equal(a, b): if a == b: return a else: return None def fail(a, b): assert False def ratio(numerator, denominator): numerator = float(numerator) denominator = float(denominator) assert 0.0 <= numerator assert numerator <= denominator try: return numerator/denominator except ZeroDivisionError: # 0/0 is undefined, but 1.0 yields more useful results return 1.0 class UndefinedEvent(Exception): """Raised when attempting to get an event which is undefined.""" def __init__(self, event): Exception.__init__(self) self.event = event def __str__(self): return 'unspecified event %s' % self.event.name class Event(object): """Describe a kind of event, and its basic operations.""" def __init__(self, name, null, aggregator, formatter = str): self.name = name self._null = null self._aggregator = aggregator self._formatter = formatter def __eq__(self, other): return self is other def __hash__(self): return id(self) def null(self): return self._null def aggregate(self, val1, val2): """Aggregate two event values.""" assert val1 is not None assert val2 is not None return self._aggregator(val1, val2) def format(self, val): """Format an event value.""" assert val is not None return self._formatter(val) MODULE = Event("Module", None, equal) PROCESS = Event("Process", None, equal) CALLS = Event("Calls", 0, add) SAMPLES = Event("Samples", 0, add) TIME = Event("Time", 0.0, add, lambda x: '(' + str(x) + ')') TIME_RATIO = Event("Time ratio", 0.0, add, lambda x: '(' + percentage(x) + ')') TOTAL_TIME = Event("Total time", 0.0, fail) TOTAL_TIME_RATIO = Event("Total time ratio", 0.0, fail, percentage) CALL_RATIO = Event("Call ratio", 0.0, add, percentage) PRUNE_RATIO = Event("Prune ratio", 0.0, add, percentage) class Object(object): """Base class for all objects in profile which can store events.""" def __init__(self, events=None): if events is None: self.events = {} else: self.events = events def __hash__(self): return id(self) def __eq__(self, other): return self is other def __contains__(self, event): return event in self.events def __getitem__(self, event): try: return self.events[event] except KeyError: raise UndefinedEvent(event) def __setitem__(self, event, value): if value is None: if event in self.events: del self.events[event] else: self.events[event] = value class Call(Object): """A call between functions. There should be at most one call object for every pair of functions. 
""" def __init__(self, callee_id): Object.__init__(self) self.callee_id = callee_id class Function(Object): """A function.""" def __init__(self, id, name): Object.__init__(self) self.id = id self.name = name self.calls = {} self.cycle = None def add_call(self, call): if call.callee_id in self.calls: sys.stderr.write('warning: overwriting call from function %s to %s\n' % (str(self.id), str(call.callee_id))) self.calls[call.callee_id] = call # TODO: write utility functions def __repr__(self): return self.name class Cycle(Object): """A cycle made from recursive function calls.""" def __init__(self): Object.__init__(self) # XXX: Do cycles need an id? self.functions = set() def add_function(self, function): assert function not in self.functions self.functions.add(function) # XXX: Aggregate events? if function.cycle is not None: for other in function.cycle.functions: if function not in self.functions: self.add_function(other) function.cycle = self class Profile(Object): """The whole profile.""" def __init__(self): Object.__init__(self) self.functions = {} self.cycles = [] def add_function(self, function): if function.id in self.functions: sys.stderr.write('warning: overwriting function %s (id %s)\n' % (function.name, str(function.id))) self.functions[function.id] = function def add_cycle(self, cycle): self.cycles.append(cycle) def validate(self): """Validate the edges.""" for function in self.functions.itervalues(): for callee_id in function.calls.keys(): assert function.calls[callee_id].callee_id == callee_id if callee_id not in self.functions: sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name)) del function.calls[callee_id] def find_cycles(self): """Find cycles using Tarjan's strongly connected components algorithm.""" # Apply the Tarjan's algorithm successively until all functions are visited visited = set() for function in self.functions.itervalues(): if function not in visited: self._tarjan(function, 0, [], {}, {}, visited) cycles = [] for function in self.functions.itervalues(): if function.cycle is not None and function.cycle not in cycles: cycles.append(function.cycle) self.cycles = cycles if 0: for cycle in cycles: sys.stderr.write("Cycle:\n") for member in cycle.functions: sys.stderr.write("\t%s\n" % member.name) def _tarjan(self, function, order, stack, orders, lowlinks, visited): """Tarjan's strongly connected components algorithm. 
See also: - http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm """ visited.add(function) orders[function] = order lowlinks[function] = order order += 1 pos = len(stack) stack.append(function) for call in function.calls.itervalues(): callee = self.functions[call.callee_id] # TODO: use a set to optimize lookup if callee not in orders: order = self._tarjan(callee, order, stack, orders, lowlinks, visited) lowlinks[function] = min(lowlinks[function], lowlinks[callee]) elif callee in stack: lowlinks[function] = min(lowlinks[function], orders[callee]) if lowlinks[function] == orders[function]: # Strongly connected component found members = stack[pos:] del stack[pos:] if len(members) > 1: cycle = Cycle() for member in members: cycle.add_function(member) return order def call_ratios(self, event): # Aggregate for incoming calls cycle_totals = {} for cycle in self.cycles: cycle_totals[cycle] = 0.0 function_totals = {} for function in self.functions.itervalues(): function_totals[function] = 0.0 for function in self.functions.itervalues(): for call in function.calls.itervalues(): if call.callee_id != function.id: callee = self.functions[call.callee_id] function_totals[callee] += call[event] if callee.cycle is not None and callee.cycle is not function.cycle: cycle_totals[callee.cycle] += call[event] # Compute the ratios for function in self.functions.itervalues(): for call in function.calls.itervalues(): assert CALL_RATIO not in call if call.callee_id != function.id: callee = self.functions[call.callee_id] if callee.cycle is not None and callee.cycle is not function.cycle: total = cycle_totals[callee.cycle] else: total = function_totals[callee] call[CALL_RATIO] = ratio(call[event], total) def integrate(self, outevent, inevent): """Propagate function time ratio allong the function calls. Must be called after finding the cycles. 
See also: - http://citeseer.ist.psu.edu/graham82gprof.html """ # Sanity checking assert outevent not in self for function in self.functions.itervalues(): assert outevent not in function assert inevent in function for call in function.calls.itervalues(): assert outevent not in call if call.callee_id != function.id: assert CALL_RATIO in call # Aggregate the input for each cycle for cycle in self.cycles: total = inevent.null() for function in self.functions.itervalues(): total = inevent.aggregate(total, function[inevent]) self[inevent] = total # Integrate along the edges total = inevent.null() for function in self.functions.itervalues(): total = inevent.aggregate(total, function[inevent]) self._integrate_function(function, outevent, inevent) self[outevent] = total def _integrate_function(self, function, outevent, inevent): if function.cycle is not None: return self._integrate_cycle(function.cycle, outevent, inevent) else: if outevent not in function: total = function[inevent] for call in function.calls.itervalues(): if call.callee_id != function.id: total += self._integrate_call(call, outevent, inevent) function[outevent] = total return function[outevent] def _integrate_call(self, call, outevent, inevent): assert outevent not in call assert CALL_RATIO in call callee = self.functions[call.callee_id] subtotal = call[CALL_RATIO]*self._integrate_function(callee, outevent, inevent) call[outevent] = subtotal return subtotal def _integrate_cycle(self, cycle, outevent, inevent): if outevent not in cycle: total = inevent.null() for member in cycle.functions: subtotal = member[inevent] for call in member.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is not cycle: subtotal += self._integrate_call(call, outevent, inevent) total += subtotal cycle[outevent] = total callees = {} for function in self.functions.itervalues(): if function.cycle is not cycle: for call in function.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is cycle: try: callees[callee] += call[CALL_RATIO] except KeyError: callees[callee] = call[CALL_RATIO] for callee, call_ratio in callees.iteritems(): ranks = {} call_ratios = {} partials = {} self._rank_cycle_function(cycle, callee, 0, ranks) self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set()) partial = self._integrate_cycle_function(cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent) assert partial == max(partials.values()) assert not total or abs(1.0 - partial/(call_ratio*total)) <= 0.001 return cycle[outevent] def _rank_cycle_function(self, cycle, function, rank, ranks): if function not in ranks or ranks[function] > rank: ranks[function] = rank for call in function.calls.itervalues(): if call.callee_id != function.id: callee = self.functions[call.callee_id] if callee.cycle is cycle: self._rank_cycle_function(cycle, callee, rank + 1, ranks) def _call_ratios_cycle(self, cycle, function, ranks, call_ratios, visited): if function not in visited: visited.add(function) for call in function.calls.itervalues(): if call.callee_id != function.id: callee = self.functions[call.callee_id] if callee.cycle is cycle: if ranks[callee] > ranks[function]: call_ratios[callee] = call_ratios.get(callee, 0.0) + call[CALL_RATIO] self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited) def _integrate_cycle_function(self, cycle, function, partial_ratio, partials, ranks, call_ratios, outevent, inevent): if function not in partials: partial = partial_ratio*function[inevent] for call in 
function.calls.itervalues(): if call.callee_id != function.id: callee = self.functions[call.callee_id] if callee.cycle is not cycle: assert outevent in call partial += partial_ratio*call[outevent] else: if ranks[callee] > ranks[function]: callee_partial = self._integrate_cycle_function(cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent) call_ratio = ratio(call[CALL_RATIO], call_ratios[callee]) call_partial = call_ratio*callee_partial try: call[outevent] += call_partial except UndefinedEvent: call[outevent] = call_partial partial += call_partial partials[function] = partial try: function[outevent] += partial except UndefinedEvent: function[outevent] = partial return partials[function] def aggregate(self, event): """Aggregate an event for the whole profile.""" total = event.null() for function in self.functions.itervalues(): try: total = event.aggregate(total, function[event]) except UndefinedEvent: return self[event] = total def ratio(self, outevent, inevent): assert outevent not in self assert inevent in self for function in self.functions.itervalues(): assert outevent not in function assert inevent in function function[outevent] = ratio(function[inevent], self[inevent]) for call in function.calls.itervalues(): assert outevent not in call if inevent in call: call[outevent] = ratio(call[inevent], self[inevent]) self[outevent] = 1.0 def prune(self, node_thres, edge_thres): """Prune the profile""" # compute the prune ratios for function in self.functions.itervalues(): try: function[PRUNE_RATIO] = function[TOTAL_TIME_RATIO] except UndefinedEvent: pass for call in function.calls.itervalues(): callee = self.functions[call.callee_id] if TOTAL_TIME_RATIO in call: # handle exact cases first call[PRUNE_RATIO] = call[TOTAL_TIME_RATIO] else: try: # make a safe estimate call[PRUNE_RATIO] = min(function[TOTAL_TIME_RATIO], callee[TOTAL_TIME_RATIO]) except UndefinedEvent: pass # prune the nodes for function_id in self.functions.keys(): function = self.functions[function_id] try: if function[PRUNE_RATIO] < node_thres: del self.functions[function_id] except UndefinedEvent: pass # prune the egdes for function in self.functions.itervalues(): for callee_id in function.calls.keys(): call = function.calls[callee_id] try: if callee_id not in self.functions or call[PRUNE_RATIO] < edge_thres: del function.calls[callee_id] except UndefinedEvent: pass def dump(self): for function in self.functions.itervalues(): sys.stderr.write('Function %s:\n' % (function.name,)) self._dump_events(function.events) for call in function.calls.itervalues(): callee = self.functions[call.callee_id] sys.stderr.write(' Call %s:\n' % (callee.name,)) self._dump_events(call.events) def _dump_events(self, events): for event, value in events.iteritems(): sys.stderr.write(' %s: %s\n' % (event.name, event.format(value))) class Struct: """Masquerade a dictionary with a structure-like behavior.""" def __init__(self, attrs = None): if attrs is None: attrs = {} self.__dict__['_attrs'] = attrs def __getattr__(self, name): try: return self._attrs[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, value): self._attrs[name] = value def __str__(self): return str(self._attrs) def __repr__(self): return repr(self._attrs) class ParseError(Exception): """Raised when parsing to signal mismatches.""" def __init__(self, msg, line): self.msg = msg # TODO: store more source line information self.line = line def __str__(self): return '%s: %r' % (self.msg, self.line) class Parser: """Parser interface.""" def 
__init__(self): pass def parse(self): raise NotImplementedError class LineParser(Parser): """Base class for parsers that read line-based formats.""" def __init__(self, file): Parser.__init__(self) self._file = file self.__line = None self.__eof = False def readline(self): line = self._file.readline() if not line: self.__line = '' self.__eof = True self.__line = line.rstrip('\r\n') def lookahead(self): assert self.__line is not None return self.__line def consume(self): assert self.__line is not None line = self.__line self.readline() return line def eof(self): assert self.__line is not None return self.__eof class GprofParser(Parser): """Parser for GNU gprof output. See also: - Chapter "Interpreting gprof's Output" from the GNU gprof manual http://sourceware.org/binutils/docs-2.18/gprof/Call-Graph.html#Call-Graph - File "cg_print.c" from the GNU gprof source code http://sourceware.org/cgi-bin/cvsweb.cgi/~checkout~/src/gprof/cg_print.c?rev=1.12&cvsroot=src """ def __init__(self, fp): Parser.__init__(self) self.fp = fp self.functions = {} self.cycles = {} def readline(self): line = self.fp.readline() if not line: sys.stderr.write('error: unexpected end of file\n') sys.exit(1) line = line.rstrip('\r\n') return line _int_re = re.compile(r'^\d+$') _float_re = re.compile(r'^\d+\.\d+$') def translate(self, mo): """Extract a structure from a match object, while translating the types in the process.""" attrs = {} groupdict = mo.groupdict() for name, value in groupdict.iteritems(): if value is None: value = None elif self._int_re.match(value): value = int(value) elif self._float_re.match(value): value = float(value) attrs[name] = (value) return Struct(attrs) _cg_header_re = re.compile( # original gprof header r'^\s+called/total\s+parents\s*$|' + r'^index\s+%time\s+self\s+descendents\s+called\+self\s+name\s+index\s*$|' + r'^\s+called/total\s+children\s*$|' + # GNU gprof header r'^index\s+%\s+time\s+self\s+children\s+called\s+name\s*$' ) _cg_ignore_re = re.compile( # spontaneous r'^\s+<spontaneous>\s*$|' # internal calls (such as "mcount") r'^.*\((\d+)\)$' ) _cg_primary_re = re.compile( r'^\[(?P<index>\d+)\]' + r'\s+(?P<percentage_time>\d+\.\d+)' + r'\s+(?P<self>\d+\.\d+)' + r'\s+(?P<descendants>\d+\.\d+)' + r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' + r'\s+(?P<name>\S.*?)' + r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' + r'\s\[(\d+)\]$' ) _cg_parent_re = re.compile( r'^\s+(?P<self>\d+\.\d+)?' + r'\s+(?P<descendants>\d+\.\d+)?' + r'\s+(?P<called>\d+)(?:/(?P<called_total>\d+))?' + r'\s+(?P<name>\S.*?)' + r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' + r'\s\[(?P<index>\d+)\]$' ) _cg_child_re = _cg_parent_re _cg_cycle_header_re = re.compile( r'^\[(?P<index>\d+)\]' + r'\s+(?P<percentage_time>\d+\.\d+)' + r'\s+(?P<self>\d+\.\d+)' + r'\s+(?P<descendants>\d+\.\d+)' + r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' + r'\s+<cycle\s(?P<cycle>\d+)\sas\sa\swhole>' + r'\s\[(\d+)\]$' ) _cg_cycle_member_re = re.compile( r'^\s+(?P<self>\d+\.\d+)?' + r'\s+(?P<descendants>\d+\.\d+)?' + r'\s+(?P<called>\d+)(?:\+(?P<called_self>\d+))?' + r'\s+(?P<name>\S.*?)' + r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' 
+ r'\s\[(?P<index>\d+)\]$' ) _cg_sep_re = re.compile(r'^--+$') def parse_function_entry(self, lines): parents = [] children = [] while True: if not lines: sys.stderr.write('warning: unexpected end of entry\n') line = lines.pop(0) if line.startswith('['): break # read function parent line mo = self._cg_parent_re.match(line) if not mo: if self._cg_ignore_re.match(line): continue sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line) else: parent = self.translate(mo) parents.append(parent) # read primary line mo = self._cg_primary_re.match(line) if not mo: sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line) return else: function = self.translate(mo) while lines: line = lines.pop(0) # read function subroutine line mo = self._cg_child_re.match(line) if not mo: if self._cg_ignore_re.match(line): continue sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line) else: child = self.translate(mo) children.append(child) function.parents = parents function.children = children self.functions[function.index] = function def parse_cycle_entry(self, lines): # read cycle header line line = lines[0] mo = self._cg_cycle_header_re.match(line) if not mo: sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line) return cycle = self.translate(mo) # read cycle member lines cycle.functions = [] for line in lines[1:]: mo = self._cg_cycle_member_re.match(line) if not mo: sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line) continue call = self.translate(mo) cycle.functions.append(call) self.cycles[cycle.cycle] = cycle def parse_cg_entry(self, lines): if lines[0].startswith("["): self.parse_cycle_entry(lines) else: self.parse_function_entry(lines) def parse_cg(self): """Parse the call graph.""" # skip call graph header while not self._cg_header_re.match(self.readline()): pass line = self.readline() while self._cg_header_re.match(line): line = self.readline() # process call graph entries entry_lines = [] while line != '\014': # form feed if line and not line.isspace(): if self._cg_sep_re.match(line): self.parse_cg_entry(entry_lines) entry_lines = [] else: entry_lines.append(line) line = self.readline() def parse(self): self.parse_cg() self.fp.close() profile = Profile() profile[TIME] = 0.0 cycles = {} for index in self.cycles.iterkeys(): cycles[index] = Cycle() for entry in self.functions.itervalues(): # populate the function function = Function(entry.index, entry.name) function[TIME] = entry.self if entry.called is not None: function[CALLS] = entry.called if entry.called_self is not None: call = Call(entry.index) call[CALLS] = entry.called_self function[CALLS] += entry.called_self # populate the function calls for child in entry.children: call = Call(child.index) assert child.called is not None call[CALLS] = child.called if child.index not in self.functions: # NOTE: functions that were never called but were discovered by gprof's # static call graph analysis dont have a call graph entry so we need # to add them here missing = Function(child.index, child.name) function[TIME] = 0.0 function[CALLS] = 0 profile.add_function(missing) function.add_call(call) profile.add_function(function) if entry.cycle is not None: cycles[entry.cycle].add_function(function) profile[TIME] = profile[TIME] + function[TIME] for cycle in cycles.itervalues(): profile.add_cycle(cycle) # Compute derived events profile.validate() profile.ratio(TIME_RATIO, TIME) profile.call_ratios(CALLS) profile.integrate(TOTAL_TIME, TIME) profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME) 
return profile class OprofileParser(LineParser): """Parser for oprofile callgraph output. See also: - http://oprofile.sourceforge.net/doc/opreport.html#opreport-callgraph """ _fields_re = { 'samples': r'(?P<samples>\d+)', '%': r'(?P<percentage>\S+)', 'linenr info': r'(?P<source>\(no location information\)|\S+:\d+)', 'image name': r'(?P<image>\S+(?:\s\(tgid:[^)]*\))?)', 'app name': r'(?P<application>\S+)', 'symbol name': r'(?P<symbol>\(no symbols\)|.+?)', } def __init__(self, infile): LineParser.__init__(self, infile) self.entries = {} self.entry_re = None def add_entry(self, callers, function, callees): try: entry = self.entries[function.id] except KeyError: self.entries[function.id] = (callers, function, callees) else: callers_total, function_total, callees_total = entry self.update_subentries_dict(callers_total, callers) function_total.samples += function.samples self.update_subentries_dict(callees_total, callees) def update_subentries_dict(self, totals, partials): for partial in partials.itervalues(): try: total = totals[partial.id] except KeyError: totals[partial.id] = partial else: total.samples += partial.samples def parse(self): # read lookahead self.readline() self.parse_header() while self.lookahead(): self.parse_entry() profile = Profile() reverse_call_samples = {} # populate the profile profile[SAMPLES] = 0 for _callers, _function, _callees in self.entries.itervalues(): function = Function(_function.id, _function.name) function[SAMPLES] = _function.samples profile.add_function(function) profile[SAMPLES] += _function.samples if _function.application: function[PROCESS] = os.path.basename(_function.application) if _function.image: function[MODULE] = os.path.basename(_function.image) total_callee_samples = 0 for _callee in _callees.itervalues(): total_callee_samples += _callee.samples for _callee in _callees.itervalues(): if not _callee.self: call = Call(_callee.id) call[SAMPLES] = _callee.samples function.add_call(call) # compute derived data profile.validate() profile.find_cycles() profile.ratio(TIME_RATIO, SAMPLES) profile.call_ratios(SAMPLES) profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO) return profile def parse_header(self): while not self.match_header(): self.consume() line = self.lookahead() fields = re.split(r'\s\s+', line) entry_re = r'^\s*' + r'\s+'.join([self._fields_re[field] for field in fields]) + r'(?P<self>\s+\[self\])?$' self.entry_re = re.compile(entry_re) self.skip_separator() def parse_entry(self): callers = self.parse_subentries() if self.match_primary(): function = self.parse_subentry() if function is not None: callees = self.parse_subentries() self.add_entry(callers, function, callees) self.skip_separator() def parse_subentries(self): subentries = {} while self.match_secondary(): subentry = self.parse_subentry() subentries[subentry.id] = subentry return subentries def parse_subentry(self): entry = Struct() line = self.consume() mo = self.entry_re.match(line) if not mo: raise ParseError('failed to parse', line) fields = mo.groupdict() entry.samples = int(fields.get('samples', 0)) entry.percentage = float(fields.get('percentage', 0.0)) if 'source' in fields and fields['source'] != '(no location information)': source = fields['source'] filename, lineno = source.split(':') entry.filename = filename entry.lineno = int(lineno) else: source = '' entry.filename = None entry.lineno = None entry.image = fields.get('image', '') entry.application = fields.get('application', '') if 'symbol' in fields and fields['symbol'] != '(no symbols)': entry.symbol = fields['symbol'] 
else: entry.symbol = '' if entry.symbol.startswith('"') and entry.symbol.endswith('"'): entry.symbol = entry.symbol[1:-1] entry.id = ':'.join((entry.application, entry.image, source, entry.symbol)) entry.self = fields.get('self', None) != None if entry.self: entry.id += ':self' if entry.symbol: entry.name = entry.symbol else: entry.name = entry.image return entry def skip_separator(self): while not self.match_separator(): self.consume() self.consume() def match_header(self): line = self.lookahead() return line.startswith('samples') def match_separator(self): line = self.lookahead() return line == '-'*len(line) def match_primary(self): line = self.lookahead() return not line[:1].isspace() def match_secondary(self): line = self.lookahead() return line[:1].isspace() class SharkParser(LineParser): """Parser for MacOSX Shark output. Author: [email protected] """ def __init__(self, infile): LineParser.__init__(self, infile) self.stack = [] self.entries = {} def add_entry(self, function): try: entry = self.entries[function.id] except KeyError: self.entries[function.id] = (function, { }) else: function_total, callees_total = entry function_total.samples += function.samples def add_callee(self, function, callee): func, callees = self.entries[function.id] try: entry = callees[callee.id] except KeyError: callees[callee.id] = callee else: entry.samples += callee.samples def parse(self): self.readline() self.readline() self.readline() self.readline() match = re.compile(r'(?P<prefix>[|+ ]*)(?P<samples>\d+), (?P<symbol>[^,]+), (?P<image>.*)') while self.lookahead(): line = self.consume() mo = match.match(line) if not mo: raise ParseError('failed to parse', line) fields = mo.groupdict() prefix = len(fields.get('prefix', 0)) / 2 - 1 symbol = str(fields.get('symbol', 0)) image = str(fields.get('image', 0)) entry = Struct() entry.id = ':'.join([symbol, image]) entry.samples = int(fields.get('samples', 0)) entry.name = symbol entry.image = image # adjust the callstack if prefix < len(self.stack): del self.stack[prefix:] if prefix == len(self.stack): self.stack.append(entry) # if the callstack has had an entry, it's this functions caller if prefix > 0: self.add_callee(self.stack[prefix - 1], entry) self.add_entry(entry) profile = Profile() profile[SAMPLES] = 0 for _function, _callees in self.entries.itervalues(): function = Function(_function.id, _function.name) function[SAMPLES] = _function.samples profile.add_function(function) profile[SAMPLES] += _function.samples if _function.image: function[MODULE] = os.path.basename(_function.image) for _callee in _callees.itervalues(): call = Call(_callee.id) call[SAMPLES] = _callee.samples function.add_call(call) # compute derived data profile.validate() profile.find_cycles() profile.ratio(TIME_RATIO, SAMPLES) profile.call_ratios(SAMPLES) profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO) return profile class PstatsParser: """Parser python profiling statistics saved with te pstats module.""" def __init__(self, *filename): import pstats self.stats = pstats.Stats(*filename) self.profile = Profile() self.function_ids = {} def get_function_name(self, (filename, line, name)): module = os.path.splitext(filename)[0] module = os.path.basename(module) return "%s:%d:%s" % (module, line, name) def get_function(self, key): try: id = self.function_ids[key] except KeyError: id = len(self.function_ids) name = self.get_function_name(key) function = Function(id, name) self.profile.functions[id] = function self.function_ids[key] = id else: function = self.profile.functions[id] return 
function def parse(self): self.profile[TIME] = 0.0 self.profile[TOTAL_TIME] = self.stats.total_tt for fn, (cc, nc, tt, ct, callers) in self.stats.stats.iteritems(): callee = self.get_function(fn) callee[CALLS] = nc callee[TOTAL_TIME] = ct callee[TIME] = tt self.profile[TIME] += tt self.profile[TOTAL_TIME] = max(self.profile[TOTAL_TIME], ct) for fn, value in callers.iteritems(): caller = self.get_function(fn) call = Call(callee.id) if isinstance(value, tuple): for i in xrange(0, len(value), 4): nc, cc, tt, ct = value[i:i+4] if CALLS in call: call[CALLS] += cc else: call[CALLS] = cc if TOTAL_TIME in call: call[TOTAL_TIME] += ct else: call[TOTAL_TIME] = ct else: call[CALLS] = value call[TOTAL_TIME] = ratio(value, nc)*ct caller.add_call(call) #self.stats.print_stats() #self.stats.print_callees() # Compute derived events self.profile.validate() self.profile.ratio(TIME_RATIO, TIME) self.profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME) return self.profile class Theme: def __init__(self, bgcolor = (0.0, 0.0, 1.0), mincolor = (0.0, 0.0, 0.0), maxcolor = (0.0, 0.0, 1.0), fontname = "Arial", minfontsize = 10.0, maxfontsize = 10.0, minpenwidth = 0.5, maxpenwidth = 4.0, gamma = 2.2): self.bgcolor = bgcolor self.mincolor = mincolor self.maxcolor = maxcolor self.fontname = fontname self.minfontsize = minfontsize self.maxfontsize = maxfontsize self.minpenwidth = minpenwidth self.maxpenwidth = maxpenwidth self.gamma = gamma def graph_bgcolor(self): return self.hsl_to_rgb(*self.bgcolor) def graph_fontname(self): return self.fontname def graph_fontsize(self): return self.minfontsize def node_bgcolor(self, weight): return self.color(weight) def node_fgcolor(self, weight): return self.graph_bgcolor() def node_fontsize(self, weight): return self.fontsize(weight) def edge_color(self, weight): return self.color(weight) def edge_fontsize(self, weight): return self.fontsize(weight) def edge_penwidth(self, weight): return max(weight*self.maxpenwidth, self.minpenwidth) def edge_arrowsize(self, weight): return 0.5 * math.sqrt(self.edge_penwidth(weight)) def fontsize(self, weight): return max(weight**2 * self.maxfontsize, self.minfontsize) def color(self, weight): weight = min(max(weight, 0.0), 1.0) hmin, smin, lmin = self.mincolor hmax, smax, lmax = self.maxcolor h = hmin + weight*(hmax - hmin) s = smin + weight*(smax - smin) l = lmin + weight*(lmax - lmin) return self.hsl_to_rgb(h, s, l) def hsl_to_rgb(self, h, s, l): """Convert a color from HSL color-model to RGB. 
See also: - http://www.w3.org/TR/css3-color/#hsl-color """ h = h % 1.0 s = min(max(s, 0.0), 1.0) l = min(max(l, 0.0), 1.0) if l <= 0.5: m2 = l*(s + 1.0) else: m2 = l + s - l*s m1 = l*2.0 - m2 r = self._hue_to_rgb(m1, m2, h + 1.0/3.0) g = self._hue_to_rgb(m1, m2, h) b = self._hue_to_rgb(m1, m2, h - 1.0/3.0) # Apply gamma correction r **= self.gamma g **= self.gamma b **= self.gamma return (r, g, b) def _hue_to_rgb(self, m1, m2, h): if h < 0.0: h += 1.0 elif h > 1.0: h -= 1.0 if h*6 < 1.0: return m1 + (m2 - m1)*h*6.0 elif h*2 < 1.0: return m2 elif h*3 < 2.0: return m1 + (m2 - m1)*(2.0/3.0 - h)*6.0 else: return m1 TEMPERATURE_COLORMAP = Theme( mincolor = (2.0/3.0, 0.80, 0.25), # dark blue maxcolor = (0.0, 1.0, 0.5), # satured red gamma = 1.0 ) PINK_COLORMAP = Theme( mincolor = (0.0, 1.0, 0.90), # pink maxcolor = (0.0, 1.0, 0.5), # satured red ) GRAY_COLORMAP = Theme( mincolor = (0.0, 0.0, 0.85), # light gray maxcolor = (0.0, 0.0, 0.0), # black ) BW_COLORMAP = Theme( minfontsize = 8.0, maxfontsize = 24.0, mincolor = (0.0, 0.0, 0.0), # black maxcolor = (0.0, 0.0, 0.0), # black minpenwidth = 0.1, maxpenwidth = 8.0, ) class DotWriter: """Writer for the DOT language. See also: - "The DOT Language" specification http://www.graphviz.org/doc/info/lang.html """ def __init__(self, fp): self.fp = fp def graph(self, profile, theme): self.begin_graph() fontname = theme.graph_fontname() self.attr('graph', fontname=fontname, ranksep=0.25, nodesep=0.125) self.attr('node', fontname=fontname, shape="box", style="filled,rounded", fontcolor="white", width=0, height=0) self.attr('edge', fontname=fontname) for function in profile.functions.itervalues(): labels = [] for event in PROCESS, MODULE: if event in function.events: label = event.format(function[event]) labels.append(label) labels.append(function.name) for event in TOTAL_TIME_RATIO, TIME_RATIO, CALLS: if event in function.events: label = event.format(function[event]) labels.append(label) try: weight = function[PRUNE_RATIO] except UndefinedEvent: weight = 0.0 label = '\n'.join(labels) self.node(function.id, label = label, color = self.color(theme.node_bgcolor(weight)), fontcolor = self.color(theme.node_fgcolor(weight)), fontsize = "%.2f" % theme.node_fontsize(weight), ) for call in function.calls.itervalues(): callee = profile.functions[call.callee_id] labels = [] for event in TOTAL_TIME_RATIO, CALLS: if event in call.events: label = event.format(call[event]) labels.append(label) try: weight = call[PRUNE_RATIO] except UndefinedEvent: try: weight = callee[PRUNE_RATIO] except UndefinedEvent: weight = 0.0 label = '\n'.join(labels) self.edge(function.id, call.callee_id, label = label, color = self.color(theme.edge_color(weight)), fontcolor = self.color(theme.edge_color(weight)), fontsize = "%.2f" % theme.edge_fontsize(weight), penwidth = "%.2f" % theme.edge_penwidth(weight), labeldistance = "%.2f" % theme.edge_penwidth(weight), arrowsize = "%.2f" % theme.edge_arrowsize(weight), ) self.end_graph() def begin_graph(self): self.write('digraph {\n') def end_graph(self): self.write('}\n') def attr(self, what, **attrs): self.write("\t") self.write(what) self.attr_list(attrs) self.write(";\n") def node(self, node, **attrs): self.write("\t") self.id(node) self.attr_list(attrs) self.write(";\n") def edge(self, src, dst, **attrs): self.write("\t") self.id(src) self.write(" -> ") self.id(dst) self.attr_list(attrs) self.write(";\n") def attr_list(self, attrs): if not attrs: return self.write(' [') first = True for name, value in attrs.iteritems(): if first: first = False 
else: self.write(", ") self.id(name) self.write('=') self.id(value) self.write(']') def id(self, id): if isinstance(id, (int, float)): s = str(id) elif isinstance(id, str): if id.isalnum(): s = id else: s = self.escape(id) else: raise TypeError self.write(s) def color(self, (r, g, b)): def float2int(f): if f <= 0.0: return 0 if f >= 1.0: return 255 return int(255.0*f + 0.5) return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)]) def escape(self, s): s = s.encode('utf-8') s = s.replace('\\', r'\\') s = s.replace('\n', r'\n') s = s.replace('\t', r'\t') s = s.replace('"', r'\"') return '"' + s + '"' def write(self, s): self.fp.write(s) class Main: """Main program.""" themes = { "color": TEMPERATURE_COLORMAP, "pink": PINK_COLORMAP, "gray": GRAY_COLORMAP, "bw": BW_COLORMAP, } def main(self): """Main program.""" parser = optparse.OptionParser( usage="\n\t%prog [options] [file] ...", version="%%prog %s" % __version__) parser.add_option( '-o', '--output', metavar='FILE', type="string", dest="output", help="output filename [stdout]") parser.add_option( '-n', '--node-thres', metavar='PERCENTAGE', type="float", dest="node_thres", default=0.5, help="eliminate nodes below this threshold [default: %default]") parser.add_option( '-e', '--edge-thres', metavar='PERCENTAGE', type="float", dest="edge_thres", default=0.1, help="eliminate edges below this threshold [default: %default]") parser.add_option( '-f', '--format', type="choice", choices=('prof', 'oprofile', 'pstats', 'shark'), dest="format", default="prof", help="profile format: prof, oprofile, or pstats [default: %default]") parser.add_option( '-c', '--colormap', type="choice", choices=('color', 'pink', 'gray', 'bw'), dest="theme", default="color", help="color map: color, pink, gray, or bw [default: %default]") parser.add_option( '-s', '--strip', action="store_true", dest="strip", default=False, help="strip function parameters, template parameters, and const modifiers from demangled C++ function names") parser.add_option( '-w', '--wrap', action="store_true", dest="wrap", default=False, help="wrap function names") (self.options, self.args) = parser.parse_args(sys.argv[1:]) if len(self.args) > 1 and self.options.format != 'pstats': parser.error('incorrect number of arguments') try: self.theme = self.themes[self.options.theme] except KeyError: parser.error('invalid colormap \'%s\'' % self.options.theme) if self.options.format == 'prof': if not self.args: fp = sys.stdin else: fp = open(self.args[0], 'rt') parser = GprofParser(fp) elif self.options.format == 'oprofile': if not self.args: fp = sys.stdin else: fp = open(self.args[0], 'rt') parser = OprofileParser(fp) elif self.options.format == 'pstats': if not self.args: parser.error('at least a file must be specified for pstats input') parser = PstatsParser(*self.args) elif self.options.format == 'shark': if not self.args: fp = sys.stdin else: fp = open(self.args[0], 'rt') parser = SharkParser(fp) else: parser.error('invalid format \'%s\'' % self.options.format) self.profile = parser.parse() if self.options.output is None: self.output = sys.stdout else: self.output = open(self.options.output, 'wt') self.write_graph() _parenthesis_re = re.compile(r'\([^()]*\)') _angles_re = re.compile(r'<[^<>]*>') _const_re = re.compile(r'\s+const$') def strip_function_name(self, name): """Remove extraneous information from C++ demangled function names.""" # Strip function parameters from name by recursively removing paired parenthesis while True: name, n = self._parenthesis_re.subn('', name) if not n: break # 
Strip const qualifier name = self._const_re.sub('', name) # Strip template parameters from name by recursively removing paired angles while True: name, n = self._angles_re.subn('', name) if not n: break return name def wrap_function_name(self, name): """Split the function name on multiple lines.""" if len(name) > 32: ratio = 2.0/3.0 height = max(int(len(name)/(1.0 - ratio) + 0.5), 1) width = max(len(name)/height, 32) # TODO: break lines in symbols name = textwrap.fill(name, width, break_long_words=False) # Take away spaces name = name.replace(", ", ",") name = name.replace("> >", ">>") name = name.replace("> >", ">>") # catch consecutive return name def compress_function_name(self, name): """Compress function name according to the user preferences.""" if self.options.strip: name = self.strip_function_name(name) if self.options.wrap: name = self.wrap_function_name(name) # TODO: merge functions with same resulting name return name def write_graph(self): dot = DotWriter(self.output) profile = self.profile profile.prune(self.options.node_thres/100.0, self.options.edge_thres/100.0) for function in profile.functions.itervalues(): function.name = self.compress_function_name(function.name) dot.graph(profile, self.theme) if __name__ == '__main__': Main().main()
dpimenov/tvdb_api
tests/gprof2dot.py
Python
unlicense
53,218
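The cycle handling in the gprof2dot file above (Profile.find_cycles and Profile._tarjan) is Tarjan's strongly connected components algorithm applied to the call graph. Below is a minimal standalone sketch of the same idea on a plain adjacency-dict graph, independent of the script's Function/Cycle classes; the toy graph at the bottom is purely illustrative and not taken from the file.

def tarjan_scc(graph):
    """Return strongly connected components of a {node: [successors]} graph."""
    index = {}              # discovery order of each visited node
    lowlink = {}            # smallest discovery index reachable from the node
    stack, on_stack = [], set()
    sccs = []
    counter = [0]           # list so the nested function can mutate it

    def visit(node):
        index[node] = lowlink[node] = counter[0]
        counter[0] += 1
        stack.append(node)
        on_stack.add(node)
        for succ in graph.get(node, ()):
            if succ not in index:
                visit(succ)
                lowlink[node] = min(lowlink[node], lowlink[succ])
            elif succ in on_stack:
                lowlink[node] = min(lowlink[node], index[succ])
        if lowlink[node] == index[node]:
            # node is the root of a strongly connected component
            component = []
            while True:
                member = stack.pop()
                on_stack.remove(member)
                component.append(member)
                if member == node:
                    break
            sccs.append(component)

    for node in graph:
        if node not in index:
            visit(node)
    return sccs

# Example: a -> b -> c -> a forms one cycle; d only points into it.
print(tarjan_scc({'a': ['b'], 'b': ['c'], 'c': ['a'], 'd': ['a']}))

gprof2dot then keeps only components with more than one member as Cycle objects; singleton components remain ordinary functions.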
# -*- test-case-name: twisted.test.test_twistd -*-
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.

"""
The Twisted Daemon: platform-independent interface.

@author: Christopher Armstrong
"""

from twisted.application import app

from twisted.python.runtime import platformType
if platformType == "win32":
    from twisted.scripts._twistw import ServerOptions, \
        WindowsApplicationRunner as _SomeApplicationRunner
else:
    from twisted.scripts._twistd_unix import ServerOptions, \
        UnixApplicationRunner as _SomeApplicationRunner


def runApp(config):
    _SomeApplicationRunner(config).run()


def run():
    app.run(runApp, ServerOptions)


__all__ = ['run', 'runApp']
sorenh/cc
vendor/Twisted-10.0.0/twisted/scripts/twistd.py
Python
apache-2.0
721
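The twistd.py entry above shows a common pattern: pick a platform-specific implementation once at import time and expose it under a single neutral name. A hedged sketch of that pattern outside Twisted follows; the myapp._windows / myapp._posix modules and Runner classes are hypothetical, and twistd itself keys off twisted.python.runtime.platformType rather than sys.platform.

import sys

# Hypothetical modules/classes, used only to illustrate the dispatch pattern.
if sys.platform == "win32":
    from myapp._windows import WindowsRunner as Runner
else:
    from myapp._posix import PosixRunner as Runner


def run_app(config):
    # Callers never need to know which concrete Runner was imported.
    Runner(config).run()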
#! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Tests for google.protobuf.pyext behavior."""

__author__ = '[email protected] (Anuraag Agrawal)'

import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'

# We must set the implementation version above before the google3 imports.
# pylint: disable=g-import-not-at-top
from google.apputils import basetest
from google.protobuf.internal import api_implementation

# Run all tests from the original module by putting them in our namespace.
# pylint: disable=wildcard-import
from google.protobuf.internal.descriptor_test import *


class ConfirmCppApi2Test(basetest.TestCase):

  def testImplementationSetting(self):
    self.assertEqual('cpp', api_implementation.Type())
    self.assertEqual(2, api_implementation.Version())


if __name__ == '__main__':
  basetest.main()
cherrishes/weilai
xingxing/protobuf/python/lib/Python3.4/google/protobuf/pyext/descriptor_cpp2_test.py
Python
apache-2.0
2,506
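The protobuf test above hinges on setting the implementation-selection environment variables before any google.protobuf import. A minimal sketch of that ordering, reusing only names that appear in the test itself:

import os

# Must be set before the first google.protobuf import, otherwise the
# pure-Python implementation may already have been selected.
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'

from google.protobuf.internal import api_implementation

# Report which backend actually got loaded; the test above expects
# 'cpp' and 2 when the C++ extension is installed.
print(api_implementation.Type())
print(api_implementation.Version())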
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for TransformedDistribution.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from scipy import stats from tensorflow.contrib import distributions from tensorflow.contrib import linalg from tensorflow.contrib.distributions.python.ops import bijectors from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test bs = bijectors ds = distributions la = linalg class TransformedDistributionTest(test.TestCase): def _cls(self): return ds.TransformedDistribution def testTransformedDistribution(self): g = ops.Graph() with g.as_default(): mu = 3.0 sigma = 2.0 # Note: the Jacobian callable only works for this example; more generally # you may or may not need a reduce_sum. log_normal = self._cls()( distribution=ds.Normal(loc=mu, scale=sigma), bijector=bs.Exp(event_ndims=0)) sp_dist = stats.lognorm(s=sigma, scale=np.exp(mu)) # sample sample = log_normal.sample(100000, seed=235) self.assertAllEqual([], log_normal.event_shape) with self.test_session(graph=g): self.assertAllEqual([], log_normal.event_shape_tensor().eval()) self.assertAllClose( sp_dist.mean(), np.mean(sample.eval()), atol=0.0, rtol=0.05) # pdf, log_pdf, cdf, etc... # The mean of the lognormal is around 148. test_vals = np.linspace(0.1, 1000., num=20).astype(np.float32) for func in [[log_normal.log_prob, sp_dist.logpdf], [log_normal.prob, sp_dist.pdf], [log_normal.log_cdf, sp_dist.logcdf], [log_normal.cdf, sp_dist.cdf], [log_normal.survival_function, sp_dist.sf], [log_normal.log_survival_function, sp_dist.logsf]]: actual = func[0](test_vals) expected = func[1](test_vals) with self.test_session(graph=g): self.assertAllClose(expected, actual.eval(), atol=0, rtol=0.01) def testCachedSamplesWithoutInverse(self): with self.test_session() as sess: mu = 3.0 sigma = 0.02 log_normal = self._cls()( distribution=ds.Normal(loc=mu, scale=sigma), bijector=bs.Exp(event_ndims=0)) sample = log_normal.sample(1) sample_val, log_pdf_val = sess.run([sample, log_normal.log_prob(sample)]) self.assertAllClose( stats.lognorm.logpdf(sample_val, s=sigma, scale=np.exp(mu)), log_pdf_val, atol=1e-2) def testShapeChangingBijector(self): with self.test_session(): softmax = bs.SoftmaxCentered() standard_normal = ds.Normal(loc=0., scale=1.) 
multi_logit_normal = self._cls()( distribution=standard_normal, bijector=softmax) x = [[-np.log(3.), 0.], [np.log(3), np.log(5)]] y = softmax.forward(x).eval() expected_log_pdf = (stats.norm(loc=0., scale=1.).logpdf(x) - np.sum(np.log(y), axis=-1)) self.assertAllClose(expected_log_pdf, multi_logit_normal.log_prob(y).eval()) self.assertAllClose( [1, 2, 3, 2], array_ops.shape(multi_logit_normal.sample([1, 2, 3])).eval()) self.assertAllEqual([2], multi_logit_normal.event_shape) self.assertAllEqual([2], multi_logit_normal.event_shape_tensor().eval()) def testEntropy(self): with self.test_session(): shift = np.array([[-1, 0, 1], [-1, -2, -3]], dtype=np.float32) diag = np.array([[1, 2, 3], [2, 3, 2]], dtype=np.float32) actual_mvn_entropy = np.concatenate([ [stats.multivariate_normal(shift[i], np.diag(diag[i]**2)).entropy()] for i in range(len(diag))]) fake_mvn = self._cls()( ds.MultivariateNormalDiag( loc=array_ops.zeros_like(shift), scale_diag=array_ops.ones_like(diag), validate_args=True), bs.AffineLinearOperator( shift, scale=la.LinearOperatorDiag(diag, is_non_singular=True), validate_args=True), validate_args=True) self.assertAllClose(actual_mvn_entropy, fake_mvn.entropy().eval()) class ScalarToMultiTest(test.TestCase): def _cls(self): return ds.TransformedDistribution def setUp(self): self._shift = np.array([-1, 0, 1], dtype=np.float32) self._tril = np.array([[[1., 0, 0], [2, 1, 0], [3, 2, 1]], [[2, 0, 0], [3, 2, 0], [4, 3, 2]]], dtype=np.float32) def _testMVN(self, base_distribution_class, base_distribution_kwargs, batch_shape=(), event_shape=(), not_implemented_message=None): with self.test_session() as sess: # Overriding shapes must be compatible w/bijector; most bijectors are # batch_shape agnostic and only care about event_ndims. # In the case of `Affine`, if we got it wrong then it would fire an # exception due to incompatible dimensions. batch_shape_pl = array_ops.placeholder( dtypes.int32, name="dynamic_batch_shape") event_shape_pl = array_ops.placeholder( dtypes.int32, name="dynamic_event_shape") feed_dict = {batch_shape_pl: np.array(batch_shape, dtype=np.int32), event_shape_pl: np.array(event_shape, dtype=np.int32)} fake_mvn_dynamic = self._cls()( distribution=base_distribution_class(validate_args=True, **base_distribution_kwargs), bijector=bs.Affine(shift=self._shift, scale_tril=self._tril), batch_shape=batch_shape_pl, event_shape=event_shape_pl, validate_args=True) fake_mvn_static = self._cls()( distribution=base_distribution_class(validate_args=True, **base_distribution_kwargs), bijector=bs.Affine(shift=self._shift, scale_tril=self._tril), batch_shape=batch_shape, event_shape=event_shape, validate_args=True) actual_mean = np.tile(self._shift, [2, 1]) # Affine elided this tile. 
actual_cov = np.matmul(self._tril, np.transpose(self._tril, [0, 2, 1])) def actual_mvn_log_prob(x): return np.concatenate([ [stats.multivariate_normal( actual_mean[i], actual_cov[i]).logpdf(x[:, i, :])] for i in range(len(actual_cov))]).T actual_mvn_entropy = np.concatenate([ [stats.multivariate_normal( actual_mean[i], actual_cov[i]).entropy()] for i in range(len(actual_cov))]) self.assertAllEqual([3], fake_mvn_static.event_shape) self.assertAllEqual([2], fake_mvn_static.batch_shape) self.assertAllEqual(tensor_shape.TensorShape(None), fake_mvn_dynamic.event_shape) self.assertAllEqual(tensor_shape.TensorShape(None), fake_mvn_dynamic.batch_shape) x = fake_mvn_static.sample(5, seed=0).eval() for unsupported_fn in (fake_mvn_static.log_cdf, fake_mvn_static.cdf, fake_mvn_static.survival_function, fake_mvn_static.log_survival_function): with self.assertRaisesRegexp(NotImplementedError, not_implemented_message): unsupported_fn(x) num_samples = 5e3 for fake_mvn, feed_dict in ((fake_mvn_static, {}), (fake_mvn_dynamic, feed_dict)): # Ensure sample works by checking first, second moments. y = fake_mvn.sample(int(num_samples), seed=0) x = y[0:5, ...] sample_mean = math_ops.reduce_mean(y, 0) centered_y = array_ops.transpose(y - sample_mean, [1, 2, 0]) sample_cov = math_ops.matmul( centered_y, centered_y, transpose_b=True) / num_samples [ sample_mean_, sample_cov_, x_, fake_event_shape_, fake_batch_shape_, fake_log_prob_, fake_prob_, fake_entropy_, ] = sess.run([ sample_mean, sample_cov, x, fake_mvn.event_shape_tensor(), fake_mvn.batch_shape_tensor(), fake_mvn.log_prob(x), fake_mvn.prob(x), fake_mvn.entropy(), ], feed_dict=feed_dict) self.assertAllClose(actual_mean, sample_mean_, atol=0.1, rtol=0.1) self.assertAllClose(actual_cov, sample_cov_, atol=0., rtol=0.1) # Ensure all other functions work as intended. self.assertAllEqual([5, 2, 3], x_.shape) self.assertAllEqual([3], fake_event_shape_) self.assertAllEqual([2], fake_batch_shape_) self.assertAllClose(actual_mvn_log_prob(x_), fake_log_prob_, atol=0., rtol=1e-6) self.assertAllClose(np.exp(actual_mvn_log_prob(x_)), fake_prob_, atol=0., rtol=1e-5) self.assertAllClose(actual_mvn_entropy, fake_entropy_, atol=0., rtol=1e-6) def testScalarBatchScalarEvent(self): self._testMVN( base_distribution_class=ds.Normal, base_distribution_kwargs={"loc": 0., "scale": 1.}, batch_shape=[2], event_shape=[3], not_implemented_message="not implemented when overriding event_shape") def testScalarBatchNonScalarEvent(self): self._testMVN( base_distribution_class=ds.MultivariateNormalDiag, base_distribution_kwargs={"loc": [0., 0., 0.], "scale_diag": [1., 1, 1]}, batch_shape=[2], not_implemented_message="not implemented") with self.test_session(): # Can't override event_shape for scalar batch, non-scalar event. with self.assertRaisesRegexp(ValueError, "base distribution not scalar"): self._cls()( distribution=ds.MultivariateNormalDiag(loc=[0.], scale_diag=[1.]), bijector=bs.Affine(shift=self._shift, scale_tril=self._tril), batch_shape=[2], event_shape=[3], validate_args=True) def testNonScalarBatchScalarEvent(self): self._testMVN( base_distribution_class=ds.Normal, base_distribution_kwargs={"loc": [0., 0], "scale": [1., 1]}, event_shape=[3], not_implemented_message="not implemented when overriding event_shape") with self.test_session(): # Can't override batch_shape for non-scalar batch, scalar event. 
with self.assertRaisesRegexp(ValueError, "base distribution not scalar"): self._cls()( distribution=ds.Normal(loc=[0.], scale=[1.]), bijector=bs.Affine(shift=self._shift, scale_tril=self._tril), batch_shape=[2], event_shape=[3], validate_args=True) def testNonScalarBatchNonScalarEvent(self): with self.test_session(): # Can't override event_shape and/or batch_shape for non_scalar batch, # non-scalar event. with self.assertRaisesRegexp(ValueError, "base distribution not scalar"): self._cls()( distribution=ds.MultivariateNormalDiag(loc=[[0.]], scale_diag=[[1.]]), bijector=bs.Affine(shift=self._shift, scale_tril=self._tril), batch_shape=[2], event_shape=[3], validate_args=True) if __name__ == "__main__": test.main()
npuichigo/ttsflow
third_party/tensorflow/tensorflow/contrib/distributions/python/kernel_tests/transformed_distribution_test.py
Python
apache-2.0
12,624
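The first test case in the TensorFlow file above builds a log-normal distribution by wrapping a Normal base distribution in an Exp bijector and checks it against scipy. A stripped-down sketch of that construction, using only the TF 1.x contrib APIs already exercised in the file (plus a standard Session):

import numpy as np
from scipy import stats
import tensorflow as tf
from tensorflow.contrib import distributions as ds
from tensorflow.contrib.distributions.python.ops import bijectors as bs

mu, sigma = 3.0, 2.0
# Y = exp(X) with X ~ Normal(mu, sigma) is log-normal.
log_normal = ds.TransformedDistribution(
    distribution=ds.Normal(loc=mu, scale=sigma),
    bijector=bs.Exp(event_ndims=0))

sp_dist = stats.lognorm(s=sigma, scale=np.exp(mu))

with tf.Session() as sess:
    samples = sess.run(log_normal.sample(100000, seed=235))

# The sample mean should land near the closed-form lognormal mean.
print(np.mean(samples), sp_dist.mean())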
import decimal try: import thread except ImportError: import dummy_thread as thread from threading import local from django.conf import settings from django.db import DEFAULT_DB_ALIAS from django.db.backends import util from django.db.transaction import TransactionManagementError from django.utils import datetime_safe from django.utils.importlib import import_module class BaseDatabaseWrapper(local): """ Represents a database connection. """ ops = None vendor = 'unknown' def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS): # `settings_dict` should be a dictionary containing keys such as # NAME, USER, etc. It's called `settings_dict` instead of `settings` # to disambiguate it from Django settings modules. self.connection = None self.queries = [] self.settings_dict = settings_dict self.alias = alias self.use_debug_cursor = None # Transaction related attributes self.transaction_state = [] self.savepoint_state = 0 self._dirty = None def __eq__(self, other): return self.alias == other.alias def __ne__(self, other): return not self == other def _commit(self): if self.connection is not None: return self.connection.commit() def _rollback(self): if self.connection is not None: return self.connection.rollback() def _enter_transaction_management(self, managed): """ A hook for backend-specific changes required when entering manual transaction handling. """ pass def _leave_transaction_management(self, managed): """ A hook for backend-specific changes required when leaving manual transaction handling. Will usually be implemented only when _enter_transaction_management() is also required. """ pass def _savepoint(self, sid): if not self.features.uses_savepoints: return self.cursor().execute(self.ops.savepoint_create_sql(sid)) def _savepoint_rollback(self, sid): if not self.features.uses_savepoints: return self.cursor().execute(self.ops.savepoint_rollback_sql(sid)) def _savepoint_commit(self, sid): if not self.features.uses_savepoints: return self.cursor().execute(self.ops.savepoint_commit_sql(sid)) def enter_transaction_management(self, managed=True): """ Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running). """ if self.transaction_state: self.transaction_state.append(self.transaction_state[-1]) else: self.transaction_state.append(settings.TRANSACTIONS_MANAGED) if self._dirty is None: self._dirty = False self._enter_transaction_management(managed) def leave_transaction_management(self): """ Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.) """ self._leave_transaction_management(self.is_managed()) if self.transaction_state: del self.transaction_state[-1] else: raise TransactionManagementError("This code isn't under transaction " "management") if self._dirty: self.rollback() raise TransactionManagementError("Transaction managed block ended with " "pending COMMIT/ROLLBACK") self._dirty = False def is_dirty(self): """ Returns True if the current transaction requires a commit for changes to happen. """ return self._dirty def set_dirty(self): """ Sets a dirty flag for the current thread and code streak. 
This can be used to decide in a managed block of code to decide whether there are open changes waiting for commit. """ if self._dirty is not None: self._dirty = True else: raise TransactionManagementError("This code isn't under transaction " "management") def set_clean(self): """ Resets a dirty flag for the current thread and code streak. This can be used to decide in a managed block of code to decide whether a commit or rollback should happen. """ if self._dirty is not None: self._dirty = False else: raise TransactionManagementError("This code isn't under transaction management") self.clean_savepoints() def clean_savepoints(self): self.savepoint_state = 0 def is_managed(self): """ Checks whether the transaction manager is in manual or in auto state. """ if self.transaction_state: return self.transaction_state[-1] return settings.TRANSACTIONS_MANAGED def managed(self, flag=True): """ Puts the transaction manager into a manual state: managed transactions have to be committed explicitly by the user. If you switch off transaction management and there is a pending commit/rollback, the data will be commited. """ top = self.transaction_state if top: top[-1] = flag if not flag and self.is_dirty(): self._commit() self.set_clean() else: raise TransactionManagementError("This code isn't under transaction " "management") def commit_unless_managed(self): """ Commits changes if the system is not in managed transaction mode. """ if not self.is_managed(): self._commit() self.clean_savepoints() else: self.set_dirty() def rollback_unless_managed(self): """ Rolls back changes if the system is not in managed transaction mode. """ if not self.is_managed(): self._rollback() else: self.set_dirty() def commit(self): """ Does the commit itself and resets the dirty flag. """ self._commit() self.set_clean() def rollback(self): """ This function does the rollback itself and resets the dirty flag. """ self._rollback() self.set_clean() def savepoint(self): """ Creates a savepoint (if supported and required by the backend) inside the current transaction. Returns an identifier for the savepoint that will be used for the subsequent rollback or commit. """ thread_ident = thread.get_ident() self.savepoint_state += 1 tid = str(thread_ident).replace('-', '') sid = "s%s_x%d" % (tid, self.savepoint_state) self._savepoint(sid) return sid def savepoint_rollback(self, sid): """ Rolls back the most recent savepoint (if one exists). Does nothing if savepoints are not supported. """ if self.savepoint_state: self._savepoint_rollback(sid) def savepoint_commit(self, sid): """ Commits the most recent savepoint (if one exists). Does nothing if savepoints are not supported. """ if self.savepoint_state: self._savepoint_commit(sid) def close(self): if self.connection is not None: self.connection.close() self.connection = None def cursor(self): if (self.use_debug_cursor or (self.use_debug_cursor is None and settings.DEBUG)): cursor = self.make_debug_cursor(self._cursor()) else: cursor = util.CursorWrapper(self._cursor(), self) return cursor def make_debug_cursor(self, cursor): return util.CursorDebugWrapper(cursor, self) class BaseDatabaseFeatures(object): allows_group_by_pk = False # True if django.db.backend.utils.typecast_timestamp is used on values # returned from dates() calls. needs_datetime_string_cast = True empty_fetchmany_value = [] update_can_self_select = True # Does the backend distinguish between '' and None? 
interprets_empty_strings_as_nulls = False # Does the backend allow inserting duplicate rows when a unique_together # constraint exists, but one of the unique_together columns is NULL? ignores_nulls_in_unique_constraints = True can_use_chunked_reads = True can_return_id_from_insert = False uses_autocommit = False uses_savepoints = False # If True, don't use integer foreign keys referring to, e.g., positive # integer primary keys. related_fields_match_type = False allow_sliced_subqueries = True supports_joins = True distinguishes_insert_from_update = True supports_deleting_related_objects = True supports_select_related = True # Does the default test database allow multiple connections? # Usually an indication that the test database is in-memory test_db_allows_multiple_connections = True # Can an object be saved without an explicit primary key? supports_unspecified_pk = False # Can a fixture contain forward references? i.e., are # FK constraints checked at the end of transaction, or # at the end of each save operation? supports_forward_references = True # Does a dirty transaction need to be rolled back # before the cursor can be used again? requires_rollback_on_dirty_transaction = False # Does the backend allow very long model names without error? supports_long_model_names = True # Is there a REAL datatype in addition to floats/doubles? has_real_datatype = False supports_subqueries_in_group_by = True supports_bitwise_or = True # Do time/datetime fields have microsecond precision? supports_microsecond_precision = True # Does the __regex lookup support backreferencing and grouping? supports_regex_backreferencing = True # Can date/datetime lookups be performed using a string? supports_date_lookup_using_string = True # Can datetimes with timezones be used? supports_timezones = True # When performing a GROUP BY, is an ORDER BY NULL required # to remove any ordering? requires_explicit_null_ordering_when_grouping = False # Is there a 1000 item limit on query parameters? supports_1000_query_parameters = True # Can an object have a primary key of 0? MySQL says No. allows_primary_key_0 = True # Do we need to NULL a ForeignKey out, or can the constraint check be # deferred can_defer_constraint_checks = False # date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas supports_mixed_date_datetime_comparisons = True # Features that need to be confirmed at runtime # Cache whether the confirmation has been performed. 
_confirmed = False supports_transactions = None supports_stddev = None can_introspect_foreign_keys = None def __init__(self, connection): self.connection = connection def confirm(self): "Perform manual checks of any database features that might vary between installs" self._confirmed = True self.supports_transactions = self._supports_transactions() self.supports_stddev = self._supports_stddev() self.can_introspect_foreign_keys = self._can_introspect_foreign_keys() def _supports_transactions(self): "Confirm support for transactions" cursor = self.connection.cursor() cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)') self.connection._commit() cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)') self.connection._rollback() cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST') count, = cursor.fetchone() cursor.execute('DROP TABLE ROLLBACK_TEST') self.connection._commit() return count == 0 def _supports_stddev(self): "Confirm support for STDDEV and related stats functions" class StdDevPop(object): sql_function = 'STDDEV_POP' try: self.connection.ops.check_aggregate_support(StdDevPop()) except NotImplementedError: self.supports_stddev = False def _can_introspect_foreign_keys(self): "Confirm support for introspected foreign keys" # Every database can do this reliably, except MySQL, # which can't do it for MyISAM tables return True class BaseDatabaseOperations(object): """ This class encapsulates all backend-specific differences, such as the way a backend performs ordering or calculates the ID of a recently-inserted row. """ compiler_module = "django.db.models.sql.compiler" def __init__(self): self._cache = None def autoinc_sql(self, table, column): """ Returns any SQL needed to support auto-incrementing primary keys, or None if no SQL is necessary. This SQL is executed when a table is created. """ return None def date_extract_sql(self, lookup_type, field_name): """ Given a lookup_type of 'year', 'month' or 'day', returns the SQL that extracts a value from the given date field field_name. """ raise NotImplementedError() def date_interval_sql(self, sql, connector, timedelta): """ Implements the date interval functionality for expressions """ raise NotImplementedError() def date_trunc_sql(self, lookup_type, field_name): """ Given a lookup_type of 'year', 'month' or 'day', returns the SQL that truncates the given date field field_name to a DATE object with only the given specificity. """ raise NotImplementedError() def datetime_cast_sql(self): """ Returns the SQL necessary to cast a datetime value so that it will be retrieved as a Python datetime object instead of a string. This SQL should include a '%s' in place of the field's name. """ return "%s" def deferrable_sql(self): """ Returns the SQL necessary to make a constraint "initially deferred" during a CREATE TABLE statement. """ return '' def drop_foreignkey_sql(self): """ Returns the SQL command that drops a foreign key. """ return "DROP CONSTRAINT" def drop_sequence_sql(self, table): """ Returns any SQL necessary to drop the sequence for the given table. Returns None if no SQL is necessary. """ return None def fetch_returned_insert_id(self, cursor): """ Given a cursor object that has just performed an INSERT...RETURNING statement into a table that has an auto-incrementing ID, returns the newly created ID. """ return cursor.fetchone()[0] def field_cast_sql(self, db_type): """ Given a column type (e.g. 'BLOB', 'VARCHAR'), returns the SQL necessary to cast it before using it in a WHERE statement. 
Note that the resulting string should contain a '%s' placeholder for the column being searched against. """ return '%s' def force_no_ordering(self): """ Returns a list used in the "ORDER BY" clause to force no ordering at all. Returning an empty list means that nothing will be included in the ordering. """ return [] def fulltext_search_sql(self, field_name): """ Returns the SQL WHERE clause to use in order to perform a full-text search of the given field_name. Note that the resulting string should contain a '%s' placeholder for the value being searched against. """ raise NotImplementedError('Full-text search is not implemented for this database backend') def last_executed_query(self, cursor, sql, params): """ Returns a string of the query last executed by the given cursor, with placeholders replaced with actual values. `sql` is the raw query containing placeholders, and `params` is the sequence of parameters. These are used by default, but this method exists for database backends to provide a better implementation according to their own quoting schemes. """ from django.utils.encoding import smart_unicode, force_unicode # Convert params to contain Unicode values. to_unicode = lambda s: force_unicode(s, strings_only=True, errors='replace') if isinstance(params, (list, tuple)): u_params = tuple([to_unicode(val) for val in params]) else: u_params = dict([(to_unicode(k), to_unicode(v)) for k, v in params.items()]) return smart_unicode(sql) % u_params def last_insert_id(self, cursor, table_name, pk_name): """ Given a cursor object that has just performed an INSERT statement into a table that has an auto-incrementing ID, returns the newly created ID. This method also receives the table name and the name of the primary-key column. """ return cursor.lastrowid def lookup_cast(self, lookup_type): """ Returns the string to use in a query when performing lookups ("contains", "like", etc). The resulting string should contain a '%s' placeholder for the column being searched against. """ return "%s" def max_in_list_size(self): """ Returns the maximum number of items that can be passed in a single 'IN' list condition, or None if the backend does not impose a limit. """ return None def max_name_length(self): """ Returns the maximum length of table and column names, or None if there is no limit. """ return None def no_limit_value(self): """ Returns the value to use for the LIMIT when we are wanting "LIMIT infinity". Returns None if the limit clause can be omitted in this case. """ raise NotImplementedError def pk_default_value(self): """ Returns the value to use during an INSERT statement to specify that the field should use its default value. """ return 'DEFAULT' def process_clob(self, value): """ Returns the value of a CLOB column, for backends that return a locator object that requires additional processing. """ return value def return_insert_id(self): """ For backends that support returning the last insert ID as part of an insert query, this method returns the SQL and params to append to the INSERT query. The returned fragment should contain a format string to hold the appropriate column. """ pass def compiler(self, compiler_name): """ Returns the SQLCompiler class corresponding to the given name, in the namespace corresponding to the `compiler_module` attribute on this backend. """ if self._cache is None: self._cache = import_module(self.compiler_module) return getattr(self._cache, compiler_name) def quote_name(self, name): """ Returns a quoted version of the given table, index or column name. 
Does not quote the given name if it's already been quoted. """ raise NotImplementedError() def random_function_sql(self): """ Returns a SQL expression that returns a random value. """ return 'RANDOM()' def regex_lookup(self, lookup_type): """ Returns the string to use in a query when performing regular expression lookups (using "regex" or "iregex"). The resulting string should contain a '%s' placeholder for the column being searched against. If the feature is not supported (or part of it is not supported), a NotImplementedError exception can be raised. """ raise NotImplementedError def savepoint_create_sql(self, sid): """ Returns the SQL for starting a new savepoint. Only required if the "uses_savepoints" feature is True. The "sid" parameter is a string for the savepoint id. """ raise NotImplementedError def savepoint_commit_sql(self, sid): """ Returns the SQL for committing the given savepoint. """ raise NotImplementedError def savepoint_rollback_sql(self, sid): """ Returns the SQL for rolling back the given savepoint. """ raise NotImplementedError def sql_flush(self, style, tables, sequences): """ Returns a list of SQL statements required to remove all data from the given database tables (without actually removing the tables themselves). The `style` argument is a Style object as returned by either color_style() or no_style() in django.core.management.color. """ raise NotImplementedError() def sequence_reset_sql(self, style, model_list): """ Returns a list of the SQL statements required to reset sequences for the given models. The `style` argument is a Style object as returned by either color_style() or no_style() in django.core.management.color. """ return [] # No sequence reset required by default. def start_transaction_sql(self): """ Returns the SQL statement required to start a transaction. """ return "BEGIN;" def end_transaction_sql(self, success=True): if not success: return "ROLLBACK;" return "COMMIT;" def tablespace_sql(self, tablespace, inline=False): """ Returns the SQL that will be appended to tables or rows to define a tablespace. Returns '' if the backend doesn't use tablespaces. """ return '' def prep_for_like_query(self, x): """Prepares a value for use in a LIKE query.""" from django.utils.encoding import smart_unicode return smart_unicode(x).replace("\\", "\\\\").replace("%", "\%").replace("_", "\_") # Same as prep_for_like_query(), but called for "iexact" matches, which # need not necessarily be implemented using "LIKE" in the backend. prep_for_iexact_query = prep_for_like_query def value_to_db_auto(self, value): """ Transform a value to an object compatible with the auto field required by the backend driver for auto columns. """ if value is None: return None return int(value) def value_to_db_date(self, value): """ Transform a date value to an object compatible with what is expected by the backend driver for date columns. """ if value is None: return None return datetime_safe.new_date(value).strftime('%Y-%m-%d') def value_to_db_datetime(self, value): """ Transform a datetime value to an object compatible with what is expected by the backend driver for datetime columns. """ if value is None: return None return unicode(value) def value_to_db_time(self, value): """ Transform a datetime value to an object compatible with what is expected by the backend driver for time columns. 
""" if value is None: return None return unicode(value) def value_to_db_decimal(self, value, max_digits, decimal_places): """ Transform a decimal.Decimal value to an object compatible with what is expected by the backend driver for decimal (numeric) columns. """ if value is None: return None return util.format_number(value, max_digits, decimal_places) def year_lookup_bounds(self, value): """ Returns a two-elements list with the lower and upper bound to be used with a BETWEEN operator to query a field value using a year lookup `value` is an int, containing the looked-up year. """ first = '%s-01-01 00:00:00' second = '%s-12-31 23:59:59.999999' return [first % value, second % value] def year_lookup_bounds_for_date_field(self, value): """ Returns a two-elements list with the lower and upper bound to be used with a BETWEEN operator to query a DateField value using a year lookup `value` is an int, containing the looked-up year. By default, it just calls `self.year_lookup_bounds`. Some backends need this hook because on their DB date fields can't be compared to values which include a time part. """ return self.year_lookup_bounds(value) def convert_values(self, value, field): """Coerce the value returned by the database backend into a consistent type that is compatible with the field type. """ internal_type = field.get_internal_type() if internal_type == 'DecimalField': return value elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField': return int(value) elif internal_type in ('DateField', 'DateTimeField', 'TimeField'): return value # No field, or the field isn't known to be a decimal or integer # Default to a float return float(value) def check_aggregate_support(self, aggregate_func): """Check that the backend supports the provided aggregate This is used on specific backends to rule out known aggregates that are known to have faulty implementations. If the named aggregate function has a known problem, the backend should raise NotImplemented. """ pass def combine_expression(self, connector, sub_expressions): """Combine a list of subexpressions into a single expression, using the provided connecting operator. This is required because operators can vary between backends (e.g., Oracle with %% and &) and between subexpression types (e.g., date expressions) """ conn = ' %s ' % connector return conn.join(sub_expressions) class BaseDatabaseIntrospection(object): """ This class encapsulates all backend-specific introspection utilities """ data_types_reverse = {} def __init__(self, connection): self.connection = connection def get_field_type(self, data_type, description): """Hook for a database backend to use the cursor description to match a Django field type to a database column. For Oracle, the column data_type on its own is insufficient to distinguish between a FloatField and IntegerField, for example.""" return self.data_types_reverse[data_type] def table_name_converter(self, name): """Apply a conversion to the name for the purposes of comparison. The default table name converter is for case sensitive comparison. """ return name def table_names(self): "Returns a list of names of all tables that exist in the database." cursor = self.connection.cursor() return self.get_table_list(cursor) def django_table_names(self, only_existing=False): """ Returns a list of all table names that have associated Django models and are in INSTALLED_APPS. If only_existing is True, the resulting list will only include the tables that actually exist in the database. 
""" from django.db import models, router tables = set() for app in models.get_apps(): for model in models.get_models(app): if not model._meta.managed: continue if not router.allow_syncdb(self.connection.alias, model): continue tables.add(model._meta.db_table) tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many]) if only_existing: existing_tables = self.table_names() tables = [ t for t in tables if self.table_name_converter(t) in existing_tables ] return tables def installed_models(self, tables): "Returns a set of all models represented by the provided list of table names." from django.db import models, router all_models = [] for app in models.get_apps(): for model in models.get_models(app): if router.allow_syncdb(self.connection.alias, model): all_models.append(model) tables = map(self.table_name_converter, tables) return set([ m for m in all_models if self.table_name_converter(m._meta.db_table) in tables ]) def sequence_list(self): "Returns a list of information about all DB sequences for all models in all apps." from django.db import models, router apps = models.get_apps() sequence_list = [] for app in apps: for model in models.get_models(app): if not model._meta.managed: continue if not router.allow_syncdb(self.connection.alias, model): continue for f in model._meta.local_fields: if isinstance(f, models.AutoField): sequence_list.append({'table': model._meta.db_table, 'column': f.column}) break # Only one AutoField is allowed per model, so don't bother continuing. for f in model._meta.local_many_to_many: # If this is an m2m using an intermediate table, # we don't need to reset the sequence. if f.rel.through is None: sequence_list.append({'table': f.m2m_db_table(), 'column': None}) return sequence_list class BaseDatabaseClient(object): """ This class encapsulates all backend-specific methods for opening a client shell. """ # This should be a string representing the name of the executable # (e.g., "psql"). Subclasses must override this. executable_name = None def __init__(self, connection): # connection is an instance of BaseDatabaseWrapper. self.connection = connection def runshell(self): raise NotImplementedError() class BaseDatabaseValidation(object): """ This class encapsualtes all backend-specific model validation. """ def __init__(self, connection): self.connection = connection def validate_field(self, errors, opts, f): "By default, there is no backend-specific validation" pass
liqi328/rjrepaircompany
django/db/backends/__init__.py
Python
bsd-3-clause
31,617
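The base classes in the Django backends file above are meant to be subclassed by each concrete database backend. A minimal sketch of such a subclass, overriding only hooks that appear above; the module and class names here are illustrative, not part of Django:

# Hypothetical third-party backend sketch; only the base classes shown above
# are assumed to exist, everything named "My*" is made up for illustration.
from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations


class MyDatabaseFeatures(BaseDatabaseFeatures):
    # Declare capabilities that differ from the defaults defined above.
    supports_timezones = False
    can_return_id_from_insert = True


class MyDatabaseOperations(BaseDatabaseOperations):
    def quote_name(self, name):
        # Quote identifiers, but never double-quote an already quoted name,
        # as the base-class docstring requires.
        if name.startswith('"') and name.endswith('"'):
            return name
        return '"%s"' % name

    def no_limit_value(self):
        # Returning None means the LIMIT clause can simply be omitted.
        return None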
def foo(): pass \ \ \
akosyakov/intellij-community
python/testData/formatter/trailingBlankLinesWithBackslashesAtFunctionEndNoNewLine.py
Python
apache-2.0
25
from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse


def send_validation(strategy, backend, code):
    url = '{0}?verification_code={1}'.format(
        reverse('social:complete', args=(backend.name,)),
        code.code
    )
    url = strategy.request.build_absolute_uri(url)
    send_mail('Validate your account',
              'Validate your account {0}'.format(url),
              settings.EMAIL_FROM,
              [code.email],
              fail_silently=False)
aneumeier/userprofile
userprofile/mail.py
Python
mit
494
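A helper like send_validation() above is normally wired in through python-social-auth's email-validation settings. A sketch of the relevant Django settings, assuming the userprofile/mail.py path shown above; the setting names follow python-social-auth's documented convention and the values are placeholders:

# Illustrative settings; only EMAIL_FROM is read directly by the code above.
SOCIAL_AUTH_EMAIL_VALIDATION_FUNCTION = 'userprofile.mail.send_validation'
SOCIAL_AUTH_EMAIL_VALIDATION_URL = '/email-sent/'
EMAIL_FROM = 'no-reply@example.com'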
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

from pyspark import SparkContext
# $example on$
from pyspark.mllib.stat import Statistics
# $example off$

if __name__ == "__main__":
    sc = SparkContext(appName="HypothesisTestingKolmogorovSmirnovTestExample")

    # $example on$
    parallelData = sc.parallelize([0.1, 0.15, 0.2, 0.3, 0.25])

    # run a KS test for the sample versus a standard normal distribution
    testResult = Statistics.kolmogorovSmirnovTest(parallelData, "norm", 0, 1)
    # summary of the test including the p-value, test statistic, and null hypothesis
    # if our p-value indicates significance, we can reject the null hypothesis
    # Note that the Scala functionality of calling Statistics.kolmogorovSmirnovTest with
    # a lambda to calculate the CDF is not made available in the Python API
    print(testResult)
    # $example off$

    sc.stop()
fharenheit/template-spark-app
src/main/python/mllib/hypothesis_testing_kolmogorov_smirnov_test_example.py
Python
apache-2.0
1,658
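Beyond printing the whole summary, the fields of the result object can be read individually. A small follow-up sketch to the example above, assuming the pyspark.mllib test-result wrapper exposes pValue, statistic and nullHypothesis properties as its other Statistics tests do:

# Illustrative continuation of the script above; attribute names are assumed.
if testResult.pValue < 0.05:
    print("Reject: %s (D = %f)" % (testResult.nullHypothesis, testResult.statistic))
else:
    print("Fail to reject the null hypothesis (p = %f)" % testResult.pValue)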
# coding: utf8 { '!langcode!': 'fr', '!langname!': 'Français', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" est une expression optionnelle comme "champ1=\'nouvellevaleur\'". Vous ne pouvez mettre à jour ou supprimer les résultats d\'un JOIN', '%s %%{row} deleted': '%s lignes supprimées', '%s %%{row} updated': '%s lignes mises à jour', '%s selected': '%s sélectionné', '%Y-%m-%d': '%Y-%m-%d', '%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S', 'About': 'À propos', 'Access Control': "Contrôle d'accès", 'Administrative Interface': "Interface d'administration", 'Administrative interface': "Interface d'administration", 'Ajax Recipes': 'Recettes Ajax', 'appadmin is disabled because insecure channel': "appadmin est désactivée parce que le canal n'est pas sécurisé", 'Are you sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?', 'Authentication': 'Authentification', 'Available Databases and Tables': 'Bases de données et tables disponibles', 'Buy this book': 'Acheter ce livre', 'cache': 'cache', 'Cache': 'Cache', 'Cache Keys': 'Clés de cache', 'Cannot be empty': 'Ne peut pas être vide', 'change password': 'changer le mot de passe', 'Check to delete': 'Cliquez pour supprimer', 'Check to delete:': 'Cliquez pour supprimer:', 'Clear CACHE?': 'Vider le CACHE?', 'Clear DISK': 'Vider le DISQUE', 'Clear RAM': 'Vider la RAM', 'Client IP': 'IP client', 'Community': 'Communauté', 'Components and Plugins': 'Composants et Plugins', 'Controller': 'Contrôleur', 'Copyright': 'Copyright', 'Created By': 'Créé par', 'Created On': 'Créé le', 'Current request': 'Demande actuelle', 'Current response': 'Réponse actuelle', 'Current session': 'Session en cours', 'customize me!': 'personnalisez-moi!', 'data uploaded': 'données téléchargées', 'Database': 'base de données', 'Database %s select': 'base de données %s selectionnée', 'db': 'bdd', 'DB Model': 'Modèle BDD', 'Delete:': 'Supprimer:', 'Demo': 'Démo', 'Deployment Recipes': 'Recettes de déploiement', 'Description': 'Description', 'design': 'design', 'DISK': 'DISQUE', 'Disk Cache Keys': 'Clés de cache du disque', 'Disk Cleared': 'Disque vidé', 'Documentation': 'Documentation', "Don't know what to do?": 'Vous ne savez pas quoi faire?', 'done!': 'fait!', 'Download': 'Téléchargement', 'E-mail': 'E-mail', 'Edit': 'Éditer', 'Edit current record': "Modifier l'enregistrement courant", 'edit profile': 'modifier le profil', 'Edit This App': 'Modifier cette application', 'Email and SMS': 'Email et SMS', 'enter an integer between %(min)g and %(max)g': 'entrez un entier entre %(min)g et %(max)g', 'Errors': 'Erreurs', 'export as csv file': 'exporter sous forme de fichier csv', 'FAQ': 'FAQ', 'First name': 'Prénom', 'Forms and Validators': 'Formulaires et Validateurs', 'Free Applications': 'Applications gratuites', 'Function disabled': 'Fonction désactivée', 'Group ID': 'Groupe ID', 'Groups': 'Groupes', 'Hello World': 'Bonjour le monde', 'Home': 'Accueil', 'How did you get here?': 'Comment êtes-vous arrivé ici?', 'import': 'import', 'Import/Export': 'Importer/Exporter', 'Index': 'Index', 'insert new': 'insérer un nouveau', 'insert new %s': 'insérer un nouveau %s', 'Internal State': 'État interne', 'Introduction': 'Introduction', 'Invalid email': 'E-mail invalide', 'Invalid Query': 'Requête Invalide', 'invalid request': 'requête invalide', 'Is Active': 'Est actif', 'Key': 'Clé', 'Last name': 'Nom', 'Layout': 'Mise en page', 'Layout Plugins': 'Plugins de mise en page', 'Layouts': 'Mises 
en page', 'Live chat': 'Chat en direct', 'Live Chat': 'Chat en direct', 'login': 'connectez-vous', 'Login': 'Connectez-vous', 'logout': 'déconnectez-vous', 'lost password': 'mot de passe perdu', 'Lost Password': 'Mot de passe perdu', 'Lost password?': 'Mot de passe perdu?', 'lost password?': 'mot de passe perdu?', 'Main Menu': 'Menu principal', 'Manage Cache': 'Gérer le Cache', 'Menu Model': 'Menu modèle', 'Modified By': 'Modifié par', 'Modified On': 'Modifié le', 'My Sites': 'Mes sites', 'Name': 'Nom', 'New Record': 'Nouvel enregistrement', 'new record inserted': 'nouvel enregistrement inséré', 'next 100 rows': '100 prochaines lignes', 'No databases in this application': "Cette application n'a pas de bases de données", 'Object or table name': 'Objet ou nom de table', 'Online examples': 'Exemples en ligne', 'or import from csv file': "ou importer d'un fichier CSV", 'Origin': 'Origine', 'Other Plugins': 'Autres Plugins', 'Other Recipes': 'Autres recettes', 'Overview': 'Présentation', 'Password': 'Mot de passe', "Password fields don't match": 'Les mots de passe ne correspondent pas', 'Plugins': 'Plugins', 'Powered by': 'Alimenté par', 'Preface': 'Préface', 'previous 100 rows': '100 lignes précédentes', 'Python': 'Python', 'Query:': 'Requête:', 'Quick Examples': 'Exemples Rapides', 'RAM': 'RAM', 'RAM Cache Keys': 'Clés de cache de la RAM', 'Ram Cleared': 'Ram vidée', 'Readme': 'Lisez-moi', 'Recipes': 'Recettes', 'Record': 'enregistrement', 'record does not exist': "l'archive n'existe pas", 'Record ID': "ID d'enregistrement", 'Record id': "id d'enregistrement", 'Register': "S'inscrire", 'register': "s'inscrire", 'Registration identifier': "Identifiant d'enregistrement", 'Registration key': "Clé d'enregistrement", 'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)', 'Request reset password': 'Demande de réinitialiser le mot clé', 'Reset Password key': 'Réinitialiser le mot clé', 'Resources': 'Ressources', 'Role': 'Rôle', 'Rows in Table': 'Lignes du tableau', 'Rows selected': 'Lignes sélectionnées', 'Semantic': 'Sémantique', 'Services': 'Services', 'Size of cache:': 'Taille du cache:', 'state': 'état', 'Statistics': 'Statistiques', 'Stylesheet': 'Feuille de style', 'submit': 'soumettre', 'Submit': 'Soumettre', 'Support': 'Support', 'Sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?', 'Table': 'tableau', 'Table name': 'Nom du tableau', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "requête" est une condition comme "db.table1.champ1==\'valeur\'". Quelque chose comme "db.table1.champ1==db.table2.champ2" résulte en un JOIN SQL.', 'The Core': 'Le noyau', 'The output of the file is a dictionary that was rendered by the view %s': 'La sortie de ce fichier est un dictionnaire qui été restitué par la vue %s', 'The Views': 'Les Vues', 'This App': 'Cette Appli', 'This is a copy of the scaffolding application': "Ceci est une copie de l'application échafaudage", 'Time in Cache (h:m:s)': 'Temps en Cache (h:m:s)', 'Timestamp': 'Horodatage', 'Twitter': 'Twitter', 'unable to parse csv file': "incapable d'analyser le fichier cvs", 'Update:': 'Mise à jour:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Employez (...)&(...) pour AND, (...)|(...) pour OR, and ~(...) 
pour NOT afin de construire des requêtes plus complexes.', 'User %(id)s Logged-in': 'Utilisateur %(id)s connecté', 'User %(id)s Registered': 'Utilisateur %(id)s enregistré', 'User ID': 'ID utilisateur', 'User Voice': "Voix de l'utilisateur", 'Verify Password': 'Vérifiez le mot de passe', 'Videos': 'Vidéos', 'View': 'Présentation', 'Web2py': 'Web2py', 'Welcome': 'Bienvenue', 'Welcome %s': 'Bienvenue %s', 'Welcome to web2py': 'Bienvenue à web2py', 'Welcome to web2py!': 'Bienvenue à web2py!', 'Which called the function %s located in the file %s': 'Qui a appelé la fonction %s se trouvant dans le fichier %s', 'You are successfully running web2py': 'Vous exécutez avec succès web2py', 'You can modify this application and adapt it to your needs': "Vous pouvez modifier cette application et l'adapter à vos besoins", 'You visited the url %s': "Vous avez visité l'URL %s", }
pouyana/teireader
webui/applications/grid/languages/fr.py
Python
mit
7,935
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.db import models


class Band(models.Model):
    name = models.CharField(max_length=100)
    bio = models.TextField()
    rank = models.IntegerField()

    class Meta:
        ordering = ('name',)


class Song(models.Model):
    band = models.ForeignKey(Band)
    name = models.CharField(max_length=100)
    duration = models.IntegerField()
    other_interpreters = models.ManyToManyField(Band, related_name='covers')

    class Meta:
        ordering = ('name',)


class SongInlineDefaultOrdering(admin.StackedInline):
    model = Song


class SongInlineNewOrdering(admin.StackedInline):
    model = Song
    ordering = ('duration', )


class DynOrderingBandAdmin(admin.ModelAdmin):

    def get_ordering(self, request):
        if request.user.is_superuser:
            return ['rank']
        else:
            return ['name']
oinopion/django
tests/admin_ordering/models.py
Python
bsd-3-clause
899
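A short sketch of how the admin class above would be hooked up outside the test suite, assuming the models above are importable; the register() call is illustrative and not part of the file itself:

from django.contrib import admin

# admin.site.register() wires a ModelAdmin to a model; with
# DynOrderingBandAdmin the change-list ordering then depends on the
# requesting user (rank for superusers, name otherwise).
admin.site.register(Band, DynOrderingBandAdmin)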
class Token(object):
    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        attributes = [key for key in self.__dict__
                      if not key.endswith('_mark')]
        attributes.sort()
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                               for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

#class BOMToken(Token):
#    id = '<byte order mark>'

class DirectiveToken(Token):
    id = '<directive>'
    def __init__(self, name, value, start_mark, end_mark):
        self.name = name
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class DocumentStartToken(Token):
    id = '<document start>'

class DocumentEndToken(Token):
    id = '<document end>'

class StreamStartToken(Token):
    id = '<stream start>'
    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding

class StreamEndToken(Token):
    id = '<stream end>'

class BlockSequenceStartToken(Token):
    id = '<block sequence start>'

class BlockMappingStartToken(Token):
    id = '<block mapping start>'

class BlockEndToken(Token):
    id = '<block end>'

class FlowSequenceStartToken(Token):
    id = '['

class FlowMappingStartToken(Token):
    id = '{'

class FlowSequenceEndToken(Token):
    id = ']'

class FlowMappingEndToken(Token):
    id = '}'

class KeyToken(Token):
    id = '?'

class ValueToken(Token):
    id = ':'

class BlockEntryToken(Token):
    id = '-'

class FlowEntryToken(Token):
    id = ','

class AliasToken(Token):
    id = '<alias>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class AnchorToken(Token):
    id = '<anchor>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class TagToken(Token):
    id = '<tag>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

class ScalarToken(Token):
    id = '<scalar>'
    def __init__(self, value, plain, start_mark, end_mark, style=None):
        self.value = value
        self.plain = plain
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style
cortext/crawtextV2
~/venvs/crawler/lib/python2.7/site-packages/yaml/tokens.py
Python
mit
2,573
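These token classes are what PyYAML's scanner emits. A minimal sketch of seeing them in action through the package's top-level yaml.scan() helper, which is not part of this file:

import yaml

# yaml.scan() runs the scanner and yields instances of the Token subclasses
# defined above, e.g. BlockMappingStartToken, KeyToken, ScalarToken,
# ValueToken, FlowSequenceStartToken, ...
for token in yaml.scan("answer: [1, 2, 3]"):
    print(token)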
#!/usr/bin/env python # -*- coding: utf-8 -*- # # pyramid_sms documentation build configuration file, created by # sphinx-quickstart on Tue Jul 9 22:26:36 2013. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another # directory, add these directories to sys.path here. If the directory is # relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = os.getcwd() project_root = os.path.dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import pyramid_sms # -- General configuration --------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'SMS for Pyramid' copyright = u'2016, Mikko Ohtamaa' # The version info for the project you're documenting, acts as replacement # for |version| and |release|, also used in various other places throughout # the built documents. # # The short X.Y version. version = "0.1" # The full version, including alpha/beta/rc tags. release = "0.1" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to # some non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built # documents. #keep_warnings = False # -- Options for HTML output ------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a # theme further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as # html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the # top of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon # of the docs. This file should be a Windows icon file (.ico) being # 16x16 or 32x32 pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) # here, relative to this directory. They are copied after the builtin # static files, so a file named "default.css" will overwrite the builtin # "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names # to template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. # Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. # Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages # will contain a <link> tag referring to it. The value of this option # must be the base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pyramid_smsdoc' # -- Options for LaTeX output ------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'pyramid_sms.tex', u'SMS for Pyramid Documentation', u'Mikko Ohtamaa', 'manual'), ] # The name of an image file (relative to this directory) to place at # the top of the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings # are parts, not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. 
#latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output ------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'pyramid_sms', u'SMS for Pyramid Documentation', [u'Mikko Ohtamaa'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ---------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'pyramid_sms', u'SMS for Pyramid Documentation', u'Mikko Ohtamaa', 'pyramid_sms', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False autoclass_content = "both"
websauna/pyramid_sms
docs/conf.py
Python
isc
8,451
# coding=utf-8
from django.utils.functional import SimpleLazyObject

from mongo_auth import get_user as mongo_auth_get_user


def get_user(request):
    if not hasattr(request, '_cached_user'):
        request._cached_user = mongo_auth_get_user(request)
    return request._cached_user


class AuthenticationMiddleware(object):

    def process_request(self, request):
        assert hasattr(request, 'session'), (
            "The Django authentication middleware requires session middleware "
            "to be installed. Edit your MIDDLEWARE_CLASSES setting to insert "
            "'django.contrib.sessions.middleware.SessionMiddleware' before "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'."
        )
        request.user = SimpleLazyObject(lambda: get_user(request))


class SessionAuthenticationMiddleware(object):
    """
    Formerly, a middleware for invalidating a user's sessions that don't
    correspond to the user's current session authentication hash. However,
    it caused the "Vary: Cookie" header on all responses.

    Now a backwards compatibility shim that enables session verification in
    auth.get_user() if this middleware is in MIDDLEWARE_CLASSES.
    """

    def process_request(self, request):
        pass
sv1jsb/django-angular
mongo_auth/middleware.py
Python
isc
1,264
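A sketch of the settings needed to activate the middleware above, assuming the app is importable as mongo_auth; as the assert in process_request() requires, the session middleware must come first:

# Illustrative MIDDLEWARE_CLASSES ordering for the classes defined above.
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'mongo_auth.middleware.AuthenticationMiddleware',
    'mongo_auth.middleware.SessionAuthenticationMiddleware',
)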
# -*- coding: utf-8 -*- """The initialization file for the Pywikibot framework.""" # # (C) Pywikibot team, 2008-2014 # # Distributed under the terms of the MIT license. # from __future__ import unicode_literals __release__ = '2.0rc4' __version__ = '$Id: e26392a530582f286edf2d99e729218b2e93405e $' import datetime import math import re import sys import threading import json if sys.version_info[0] > 2: from queue import Queue long = int else: from Queue import Queue from warnings import warn # Use pywikibot. prefix for all in-package imports; this is to prevent # confusion with similarly-named modules in version 1 framework, for users # who want to continue using both from pywikibot import config2 as config from pywikibot.bot import ( output, warning, error, critical, debug, stdout, exception, input, input_choice, input_yn, inputChoice, handle_args, showHelp, ui, log, calledModuleName, Bot, CurrentPageBot, WikidataBot, QuitKeyboardInterrupt, # the following are flagged as deprecated on usage handleArgs, ) from pywikibot.exceptions import ( Error, InvalidTitle, BadTitle, NoPage, SectionError, SiteDefinitionError, NoSuchSite, UnknownSite, UnknownFamily, UnknownExtension, NoUsername, UserBlocked, PageRelatedError, IsRedirectPage, IsNotRedirectPage, PageSaveRelatedError, PageNotSaved, OtherPageSaveError, LockedPage, CascadeLockedPage, LockedNoPage, NoCreateError, EditConflict, PageDeletedConflict, PageCreatedConflict, ServerError, FatalServerError, Server504Error, CaptchaError, SpamfilterError, CircularRedirect, InterwikiRedirectPage, WikiBaseError, CoordinateGlobeUnknownException, ) from pywikibot.tools import UnicodeMixin, redirect_func from pywikibot.i18n import translate from pywikibot.data.api import UploadWarning from pywikibot.diff import PatchManager import pywikibot.textlib as textlib import pywikibot.tools textlib_methods = ( 'unescape', 'replaceExcept', 'removeDisabledParts', 'removeHTMLParts', 'isDisabled', 'interwikiFormat', 'interwikiSort', 'getLanguageLinks', 'replaceLanguageLinks', 'removeLanguageLinks', 'removeLanguageLinksAndSeparator', 'getCategoryLinks', 'categoryFormat', 'replaceCategoryLinks', 'removeCategoryLinks', 'removeCategoryLinksAndSeparator', 'replaceCategoryInPlace', 'compileLinkR', 'extract_templates_and_params', 'TimeStripper', ) # pep257 doesn't understand when the first entry is on the next line __all__ = ('config', 'ui', 'UnicodeMixin', 'translate', 'Page', 'FilePage', 'Category', 'Link', 'User', 'ItemPage', 'PropertyPage', 'Claim', 'html2unicode', 'url2unicode', 'unicode2html', 'stdout', 'output', 'warning', 'error', 'critical', 'debug', 'exception', 'input_choice', 'input', 'input_yn', 'inputChoice', 'handle_args', 'handleArgs', 'showHelp', 'ui', 'log', 'calledModuleName', 'Bot', 'CurrentPageBot', 'WikidataBot', 'Error', 'InvalidTitle', 'BadTitle', 'NoPage', 'SectionError', 'SiteDefinitionError', 'NoSuchSite', 'UnknownSite', 'UnknownFamily', 'UnknownExtension', 'NoUsername', 'UserBlocked', 'UserActionRefuse', 'PageRelatedError', 'IsRedirectPage', 'IsNotRedirectPage', 'PageSaveRelatedError', 'PageNotSaved', 'OtherPageSaveError', 'LockedPage', 'CascadeLockedPage', 'LockedNoPage', 'NoCreateError', 'EditConflict', 'PageDeletedConflict', 'PageCreatedConflict', 'UploadWarning', 'ServerError', 'FatalServerError', 'Server504Error', 'CaptchaError', 'SpamfilterError', 'CircularRedirect', 'InterwikiRedirectPage', 'WikiBaseError', 'CoordinateGlobeUnknownException', 'QuitKeyboardInterrupt', ) # flake8 is unable to detect concatenation in the same operation # like: # ) + 
textlib_methods # pep257 also doesn't support __all__ multiple times in a document # so instead use this trick globals()['__all__'] = globals()['__all__'] + textlib_methods if sys.version_info[0] == 2: # T111615: Python 2 requires __all__ is bytes globals()['__all__'] = tuple(bytes(item) for item in __all__) for _name in textlib_methods: target = getattr(textlib, _name) wrapped_func = redirect_func(target) globals()[_name] = wrapped_func deprecated = redirect_func(pywikibot.tools.deprecated) deprecate_arg = redirect_func(pywikibot.tools.deprecate_arg) class Timestamp(datetime.datetime): """Class for handling MediaWiki timestamps. This inherits from datetime.datetime, so it can use all of the methods and operations of a datetime object. To ensure that the results of any operation are also a Timestamp object, be sure to use only Timestamp objects (and datetime.timedeltas) in any operation. Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to create Timestamp objects from MediaWiki string formats. As these constructors are typically used to create objects using data passed provided by site and page methods, some of which return a Timestamp when previously they returned a MediaWiki string representation, these methods also accept a Timestamp object, in which case they return a clone. Use Site.getcurrenttime() for the current time; this is more reliable than using Timestamp.utcnow(). """ mediawikiTSFormat = "%Y%m%d%H%M%S" ISO8601Format = "%Y-%m-%dT%H:%M:%SZ" def clone(self): """Clone this instance.""" return self.replace(microsecond=self.microsecond) @classmethod def fromISOformat(cls, ts): """Convert an ISO 8601 timestamp to a Timestamp object.""" # If inadvertantly passed a Timestamp object, use replace() # to create a clone. if isinstance(ts, cls): return ts.clone() return cls.strptime(ts, cls.ISO8601Format) @classmethod def fromtimestampformat(cls, ts): """Convert a MediaWiki internal timestamp to a Timestamp object.""" # If inadvertantly passed a Timestamp object, use replace() # to create a clone. if isinstance(ts, cls): return ts.clone() return cls.strptime(ts, cls.mediawikiTSFormat) def isoformat(self): """ Convert object to an ISO 8601 timestamp accepted by MediaWiki. datetime.datetime.isoformat does not postfix the ISO formatted date with a 'Z' unless a timezone is included, which causes MediaWiki ~1.19 and earlier to fail. """ return self.strftime(self.ISO8601Format) toISOformat = redirect_func(isoformat, old_name='toISOformat', class_name='Timestamp') def totimestampformat(self): """Convert object to a MediaWiki internal timestamp.""" return self.strftime(self.mediawikiTSFormat) def __str__(self): """Return a string format recognized by the API.""" return self.isoformat() def __add__(self, other): """Perform addition, returning a Timestamp instead of datetime.""" newdt = super(Timestamp, self).__add__(other) if isinstance(newdt, datetime.datetime): return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour, newdt.minute, newdt.second, newdt.microsecond, newdt.tzinfo) else: return newdt def __sub__(self, other): """Perform substraction, returning a Timestamp instead of datetime.""" newdt = super(Timestamp, self).__sub__(other) if isinstance(newdt, datetime.datetime): return Timestamp(newdt.year, newdt.month, newdt.day, newdt.hour, newdt.minute, newdt.second, newdt.microsecond, newdt.tzinfo) else: return newdt class Coordinate(object): """ Class for handling and storing Coordinates. 
For now its just being used for DataSite, but in the future we can use it for the GeoData extension. """ def __init__(self, lat, lon, alt=None, precision=None, globe='earth', typ="", name="", dim=None, site=None, entity=''): """ Represent a geo coordinate. @param lat: Latitude @type lat: float @param lon: Longitude @type lon: float @param alt: Altitute? TODO FIXME @param precision: precision @type precision: float @param globe: Which globe the point is on @type globe: str @param typ: The type of coordinate point @type typ: str @param name: The name @type name: str @param dim: Dimension (in meters) @type dim: int @param entity: The URL entity of a Wikibase item @type entity: str """ self.lat = lat self.lon = lon self.alt = alt self._precision = precision if globe: globe = globe.lower() self.globe = globe self._entity = entity self.type = typ self.name = name self._dim = dim if not site: self.site = Site().data_repository() else: self.site = site def __repr__(self): string = 'Coordinate(%s, %s' % (self.lat, self.lon) if self.globe != 'earth': string += ', globe="%s"' % self.globe string += ')' return string @property def entity(self): if self._entity: return self._entity return self.site.globes()[self.globe] def toWikibase(self): """ Export the data to a JSON object for the Wikibase API. FIXME: Should this be in the DataSite object? """ if self.globe not in self.site.globes(): raise CoordinateGlobeUnknownException( u"%s is not supported in Wikibase yet." % self.globe) return {'latitude': self.lat, 'longitude': self.lon, 'altitude': self.alt, 'globe': self.entity, 'precision': self.precision, } @classmethod def fromWikibase(cls, data, site): """Constructor to create an object from Wikibase's JSON output.""" globes = {} for k in site.globes(): globes[site.globes()[k]] = k globekey = data['globe'] if globekey: globe = globes.get(data['globe']) else: # Default to earth or should we use None here? globe = 'earth' return cls(data['latitude'], data['longitude'], data['altitude'], data['precision'], globe, site=site, entity=data['globe']) @property def precision(self): u""" Return the precision of the geo coordinate. The biggest error (in degrees) will be given by the longitudinal error; the same error in meters becomes larger (in degrees) further up north. We can thus ignore the latitudinal error. The longitudinal can be derived as follows: In small angle approximation (and thus in radians): M{Δλ ≈ Δpos / r_φ}, where r_φ is the radius of earth at the given latitude. Δλ is the error in longitude. M{r_φ = r cos φ}, where r is the radius of earth, φ the latitude Therefore:: precision = math.degrees(self._dim/(radius*math.cos(math.radians(self.lat)))) """ if not self._precision: radius = 6378137 # TODO: Support other globes self._precision = math.degrees( self._dim / (radius * math.cos(math.radians(self.lat)))) return self._precision def precisionToDim(self): """Convert precision from Wikibase to GeoData's dim.""" raise NotImplementedError class WbTime(object): """A Wikibase time representation.""" PRECISION = {'1000000000': 0, '100000000': 1, '10000000': 2, '1000000': 3, '100000': 4, '10000': 5, 'millenia': 6, 'century': 7, 'decade': 8, 'year': 9, 'month': 10, 'day': 11, 'hour': 12, 'minute': 13, 'second': 14 } FORMATSTR = '{0:+012d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z' def __init__(self, year=None, month=None, day=None, hour=None, minute=None, second=None, precision=None, before=0, after=0, timezone=0, calendarmodel=None, site=None): """ Create a new WbTime object. 
The precision can be set by the Wikibase int value (0-14) or by a human readable string, e.g., 'hour'. If no precision is given, it is set according to the given time units. """ if year is None: raise ValueError('no year given') self.precision = self.PRECISION['second'] if second is None: self.precision = self.PRECISION['minute'] second = 0 if minute is None: self.precision = self.PRECISION['hour'] minute = 0 if hour is None: self.precision = self.PRECISION['day'] hour = 0 if day is None: self.precision = self.PRECISION['month'] day = 1 if month is None: self.precision = self.PRECISION['year'] month = 1 self.year = long(year) self.month = month self.day = day self.hour = hour self.minute = minute self.second = second self.after = after self.before = before self.timezone = timezone if calendarmodel is None: if site is None: site = Site().data_repository() calendarmodel = site.calendarmodel() self.calendarmodel = calendarmodel # if precision is given it overwrites the autodetection above if precision is not None: if (isinstance(precision, int) and precision in self.PRECISION.values()): self.precision = precision elif precision in self.PRECISION: self.precision = self.PRECISION[precision] else: raise ValueError('Invalid precision: "%s"' % precision) @classmethod def fromTimestr(cls, datetimestr, precision=14, before=0, after=0, timezone=0, calendarmodel=None, site=None): match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z', datetimestr) if not match: raise ValueError(u"Invalid format: '%s'" % datetimestr) t = match.groups() return cls(long(t[0]), int(t[1]), int(t[2]), int(t[3]), int(t[4]), int(t[5]), precision, before, after, timezone, calendarmodel, site) def toTimestr(self): """ Convert the data to a UTC date/time string. @return: str """ return self.FORMATSTR.format(self.year, self.month, self.day, self.hour, self.minute, self.second) def toWikibase(self): """ Convert the data to a JSON object for the Wikibase API. @return: dict """ json = {'time': self.toTimestr(), 'precision': self.precision, 'after': self.after, 'before': self.before, 'timezone': self.timezone, 'calendarmodel': self.calendarmodel } return json @classmethod def fromWikibase(cls, ts): return cls.fromTimestr(ts[u'time'], ts[u'precision'], ts[u'before'], ts[u'after'], ts[u'timezone'], ts[u'calendarmodel']) def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __eq__(self, other): return self.__dict__ == other.__dict__ def __repr__(self): return u"WbTime(year=%(year)d, month=%(month)d, day=%(day)d, " \ u"hour=%(hour)d, minute=%(minute)d, second=%(second)d, " \ u"precision=%(precision)d, before=%(before)d, after=%(after)d, " \ u"timezone=%(timezone)d, calendarmodel='%(calendarmodel)s')" \ % self.__dict__ class WbQuantity(object): """A Wikibase quantity representation.""" def __init__(self, amount, unit=None, error=None): u""" Create a new WbQuantity object. @param amount: number representing this quantity @type amount: float @param unit: not used (only unit-less quantities are supported) @param error: the uncertainty of the amount (e.g. ±1) @type error: float, or tuple of two floats, where the first value is the upper error and the second is the lower error value. 
""" if amount is None: raise ValueError('no amount given') if unit is None: unit = '1' self.amount = amount self.unit = unit upperError = lowerError = 0 if isinstance(error, tuple): upperError, lowerError = error elif error is not None: upperError = lowerError = error self.upperBound = self.amount + upperError self.lowerBound = self.amount - lowerError def toWikibase(self): """Convert the data to a JSON object for the Wikibase API.""" json = {'amount': self.amount, 'upperBound': self.upperBound, 'lowerBound': self.lowerBound, 'unit': self.unit } return json @classmethod def fromWikibase(cls, wb): """ Create a WbQuanity from the JSON data given by the Wikibase API. @param wb: Wikibase JSON """ amount = eval(wb['amount']) upperBound = eval(wb['upperBound']) lowerBound = eval(wb['lowerBound']) error = (upperBound - amount, amount - lowerBound) return cls(amount, wb['unit'], error) def __str__(self): return json.dumps(self.toWikibase(), indent=4, sort_keys=True, separators=(',', ': ')) def __eq__(self, other): return self.__dict__ == other.__dict__ def __repr__(self): return (u"WbQuantity(amount=%(amount)s, upperBound=%(upperBound)s, " u"lowerBound=%(lowerBound)s, unit=%(unit)s)" % self.__dict__) _sites = {} _url_cache = {} # The code/fam pair for each URL def Site(code=None, fam=None, user=None, sysop=None, interface=None, url=None): """A factory method to obtain a Site object. Site objects are cached and reused by this method. By default rely on config settings. These defaults may all be overridden using the method parameters. @param code: language code (override config.mylang) @type code: string @param fam: family name or object (override config.family) @type fam: string or Family @param user: bot user name to use on this site (override config.usernames) @type user: unicode @param sysop: sysop user to use on this site (override config.sysopnames) @type sysop: unicode @param interface: site class or name of class in pywikibot.site (override config.site_interface) @type interface: subclass of L{pywikibot.site.BaseSite} or string @param url: Instead of code and fam, does try to get a Site based on the URL. Still requires that the family supporting that URL exists. 
@type url: string """ # Either code and fam or only url assert(not url or (not code and not fam)) _logger = "wiki" if url: if url in _url_cache: cached = _url_cache[url] if cached: code = cached[0] fam = cached[1] else: raise SiteDefinitionError("Unknown URL '{0}'.".format(url)) else: # Iterate through all families and look, which does apply to # the given URL for fam in config.family_files: try: family = pywikibot.family.Family.load(fam) code = family.from_url(url) if code: _url_cache[url] = (code, fam) break except Exception as e: pywikibot.warning('Error in Family(%s).from_url: %s' % (fam, e)) else: _url_cache[url] = None # TODO: As soon as AutoFamily is ready, try and use an # AutoFamily raise SiteDefinitionError("Unknown URL '{0}'.".format(url)) else: # Fallback to config defaults code = code or config.mylang fam = fam or config.family interface = interface or config.site_interface # config.usernames is initialised with a dict for each family name family_name = str(fam) if family_name in config.usernames: user = user or config.usernames[family_name].get(code) \ or config.usernames[family_name].get('*') sysop = sysop or config.sysopnames[family_name].get(code) \ or config.sysopnames[family_name].get('*') if not isinstance(interface, type): # If it isnt a class, assume it is a string try: tmp = __import__('pywikibot.site', fromlist=[interface]) interface = getattr(tmp, interface) except ImportError: raise ValueError("Invalid interface name '%(interface)s'" % locals()) if not issubclass(interface, pywikibot.site.BaseSite): warning('Site called with interface=%s' % interface.__name__) user = pywikibot.tools.normalize_username(user) key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user) if key not in _sites or not isinstance(_sites[key], interface): _sites[key] = interface(code=code, fam=fam, user=user, sysop=sysop) debug(u"Instantiated %s object '%s'" % (interface.__name__, _sites[key]), _logger) if _sites[key].code != code: warn('Site %s instantiated using different code "%s"' % (_sites[key], code), UserWarning, 2) return _sites[key] # alias for backwards-compability getSite = pywikibot.tools.redirect_func(Site, old_name='getSite') from .page import ( Page, FilePage, Category, Link, User, ItemPage, PropertyPage, Claim, ) from .page import html2unicode, url2unicode, unicode2html link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]') @pywikibot.tools.deprecated("comment parameter for page saving method") def setAction(s): """Set a summary to use for changed page submissions.""" config.default_edit_summary = s def showDiff(oldtext, newtext, context=0): """ Output a string showing the differences between oldtext and newtext. The differences are highlighted (only on compatible systems) to show which changes were made. """ PatchManager(oldtext, newtext, context=context).print_hunks() # Throttle and thread handling stopped = False def stopme(): """Drop this process from the throttle log, after pending threads finish. Can be called manually if desired, but if not, will be called automatically at Python exit. 
""" global stopped _logger = "wiki" if not stopped: debug(u"stopme() called", _logger) def remaining(): remainingPages = page_put_queue.qsize() - 1 # -1 because we added a None element to stop the queue remainingSeconds = datetime.timedelta( seconds=(remainingPages * config.put_throttle)) return (remainingPages, remainingSeconds) page_put_queue.put((None, [], {})) stopped = True if page_put_queue.qsize() > 1: num, sec = remaining() format_values = dict(num=num, sec=sec) output(u'\03{lightblue}' u'Waiting for %(num)i pages to be put. ' u'Estimated time remaining: %(sec)s' u'\03{default}' % format_values) while(_putthread.isAlive()): try: _putthread.join(1) except KeyboardInterrupt: if input_yn('There are %i pages remaining in the queue. ' 'Estimated time remaining: %s\nReally exit?' % remaining(), default=False, automatic_quit=False): return # only need one drop() call because all throttles use the same global pid try: list(_sites.values())[0].throttle.drop() log(u"Dropped throttle(s).") except IndexError: pass import atexit atexit.register(stopme) # Create a separate thread for asynchronous page saves (and other requests) def async_manager(): """Daemon; take requests from the queue and execute them in background.""" while True: (request, args, kwargs) = page_put_queue.get() if request is None: break request(*args, **kwargs) page_put_queue.task_done() def async_request(request, *args, **kwargs): """Put a request on the queue, and start the daemon if necessary.""" if not _putthread.isAlive(): try: page_put_queue.mutex.acquire() try: _putthread.start() except (AssertionError, RuntimeError): pass finally: page_put_queue.mutex.release() page_put_queue.put((request, args, kwargs)) # queue to hold pending requests page_put_queue = Queue(config.max_queue_size) # set up the background thread _putthread = threading.Thread(target=async_manager) # identification for debugging purposes _putthread.setName('Put-Thread') _putthread.setDaemon(True) wrapper = pywikibot.tools.ModuleDeprecationWrapper(__name__) wrapper._add_deprecated_attr('ImagePage', FilePage) wrapper._add_deprecated_attr( 'PageNotFound', pywikibot.exceptions.DeprecatedPageNotFoundError, warning_message=('{0}.{1} is deprecated, and no longer ' 'used by pywikibot; use http.fetch() instead.')) wrapper._add_deprecated_attr( 'UserActionRefuse', pywikibot.exceptions._EmailUserError, warning_message='UserActionRefuse is deprecated; ' 'use UserRightsError and/or NotEmailableError')
hperala/kontuwikibot
pywikibot/__init__.py
Python
mit
26,829
# import libraries import urllib.request from feedgen.feed import FeedGenerator from post_parser import post_title, post_author, post_time, post_files_num from misc import is_number # info baseurl = 'http://phya.snu.ac.kr/xe/underbbs/' url ='http://phya.snu.ac.kr/xe/index.php?mid=underbbs&category=372' # notices + general f = open('srl_notices.txt','r') num_notices = f.read().split(',') f.close() g = open('srl_general.txt','r') num_general = g.read().split(',') g.close() g = open('srl_general.txt','a') response = urllib.request.urlopen(url) data = response.read() text = data.decode('utf-8') count_new = 0 srl_arr_general = [] text_splitted = text.split('document_srl=') for i in range(1,len(text_splitted)): srl = text_splitted[i].split('">')[0].split('#comment')[0] if(is_number(srl)): if(srl not in num_notices and srl not in srl_arr_general): # second statement : to prevent duplication srl_arr_general.append(srl) if(srl not in num_general): count_new += 1 g.write(',' + srl) print('New post found : ' + srl) g.close() if(count_new != 0): print('Started generating feed...') # make FeedGenerator fg = FeedGenerator() fg.id('asdf') fg.title('SNU Physics Board RSS feed - general') fg.author({'name':'Seungwon Park','email':'yyyyy at snu dot ac dot kr'}) fg.link(href='asdf') fg.subtitle('SNU Physics Board RSS - general') fg.language('ko') for srl in srl_arr_general: print('Parsing post #' + srl + '...') fe = fg.add_entry() fe.id(baseurl + srl) fe.title(post_title(srl)) fe.author({'name':post_author(srl),'email':'unknown'}) fe.link(href = baseurl + srl) atomfeed = fg.atom_str(pretty=True) fg.atom_file('general.xml') print('Added ' + str(count_new) + ' posts to feed.') else: print('Posts are up-to-date.')
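# The is_number() helper imported from misc above is not included in this
# snippet; a plausible stand-in (hypothetical, not the repository's actual
# implementation) that matches how it is used on document_srl strings:
def is_number(s):
    """Return True if s parses as a plain integer (e.g. a document_srl)."""
    try:
        int(s)
        return True
    except ValueError:
        return False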
seungwonpark/SNU_physics_board_rss
update_general.py
Python
mit
1,781
#!/usr/bin/env python __version__= "$Version: $" __rcsid__="$Id: $" import matplotlib #matplotlib.use('WX') from wx import MilliSleep from wx import SplashScreen, SPLASH_CENTRE_ON_SCREEN, SPLASH_TIMEOUT import os import sys import warnings from . import zpickle from .utils import * from .dialogs.waxy import * from .dialogs import * from .run_sim import * import threading import pylab gray=pylab.cm.gray from matplotlib.backends.backend_wxagg import FigureCanvasWx as FigureCanvas from matplotlib.backends.backend_wx import FigureManager from matplotlib.figure import Figure from matplotlib.axes import Subplot class SimThread(threading.Thread): def __init__(self,params,parent): self.params=params self.parent=parent threading.Thread.__init__(self); def run(self): run_sim(self.params,self.parent) def subplot(*args): import pylab if len(args)==1: return pylab.subplot(args[0]) elif len(args)==3: return pylab.subplot(args[0],args[1],args[2]) elif len(args)==4: r=args[2] c=args[3] return pylab.subplot(args[0],args[1],c+(r-1)*args[1]); else: raise ValueError("invalid number of arguments") class MainFrame(Frame): def __init__(self,parent=None,title='',direction='H', size=(750,750),lfname=None,params=None): self.fig=None # turn off security warning on tmpnam. why is it here? warnings.filterwarnings('ignore') fname=os.tempnam() warnings.resetwarnings() self.base_dir=os.path.dirname(__file__) if not self.base_dir: self.base_dir='.' self.tmpfile=fname+"_plasticity.dat" self.modified=False self.running=False self.stopping=False self.quitting=False self.plot_first=False if not params: self.params=default_params() else: self.params=params for p in self.params['pattern_input']: if not os.path.exists(p['filename']): p['filename']=self.base_dir+"/"+p['filename'] if lfname: if not self.__load_sim__(lfname): self.plot_first=True Frame.__init__(self,parent,title,direction,size) def Body(self): self.CreateMenu() self.CenterOnScreen() self.ResetTitle() fname=self.base_dir+"/images/plasticity_small_icon.ico" self.SetIcon(fname) self.fig = Figure(figsize=(7,5),dpi=100) self.canvas = FigureCanvas(self, -1, self.fig) self.figmgr = FigureManager(self.canvas, 1, self) self.axes = [self.fig.add_subplot(221), self.fig.add_subplot(222), self.fig.add_subplot(223), self.fig.add_subplot(224)] if self.plot_first: sim=zpickle.load(self.tmpfile) sim['params']['display']=True self.Plot(sim) def Stopping(self): return self.stopping def Yield(self): wx.Yield() def ResetTitle(self): (root,sfname)=os.path.split(self.params['save_sim_file']) if self.modified: s=' (*)' else: s='' title='Plasticity: %s%s' % (sfname,s) self.SetTitle(title) def Plot(self,sim): if not sim['params']['display']: return if sim['params']['display_module']: try: module=__import__(sim['params']['display_module'],fromlist=['UserPlot']) except ImportError: sim['params']['display']=False dlg = MessageDialog(self, "Error","Error in Import: %s. Turning display off" % sim['params']['display_module'], icon='error') dlg.ShowModal() dlg.Destroy() return try: module.UserPlot(self,sim) return except ValueError: sim['params']['display']=False dlg = MessageDialog(self, "Error","Error in display. 
Turning display off", icon='error') dlg.ShowModal() dlg.Destroy() return try: im=weights2image(sim['params'],sim['weights']) self.axes[0].hold(False) self.axes[0].set_axis_bgcolor('k') self.axes[0].pcolor(im,cmap=gray,edgecolors='k') self.axes[0].set_aspect('equal') num_moments=sim['moments_mat'].shape[0] self.axes[1].hold(False) num_neurons=sim['moments_mat'].shape[1] for k in range(num_neurons): for i in range(num_moments): self.axes[1].plot(sim['moments_mat'][i,k,:],'-o') self.axes[1].hold(True) self.axes[2].hold(False) response_mat=sim['response_mat'] response_var_list=sim['response_var_list'] styles=['b-o','g-o'] for i,r in enumerate(response_var_list[-1]): x=r[1] y=r[2] self.axes[2].plot(x,y,styles[i]) self.axes[2].hold(True) self.axes[3].hold(False) styles=['b-o','g-o'] for i,r in enumerate(response_mat): self.axes[3].plot(r,styles[i]) self.axes[3].hold(True) self.canvas.draw() self.canvas.gui_repaint() except ValueError: sim['params']['display']=False dlg = MessageDialog(self, "Error","Error in display. Turning display off", icon='error') dlg.ShowModal() dlg.Destroy() def Run_Pause(self,event): if not self.running: # pylab.close() self.params['tmpfile']=self.tmpfile if os.path.exists(self.tmpfile): self.params['continue']=1 self.modified=True self.ResetTitle() self.running=True ## d={} ## d['params']=self.params ## zpickle.save(d,'plasticity_tmpparams.dat') ## cmd='./run_sim.py --paramfile plasticity_tmpparams.dat --from_gui 1' ## os.system(cmd) self.stopping=False run_sim(self.params,self) self.params['load_sim_file']=self.tmpfile self.running=False if self.quitting: self.Quit() else: self.stopping=True def __load_sim__(self,lfname): sim=zpickle.load(lfname) params=sim['params'] params['save_sim_file']=self.params['save_sim_file'] params['load_sim_file']='' params['continue']=False try: params['initial_weights']=sim['weights'] params['initial_moments']=sim['moments'] except KeyError: self.params=params return 1 params['load_sim_file']=self.tmpfile params['continue']=True sim['params']=params self.params=params zpickle.save(sim,self.tmpfile) return 0 def Reset_Simulation(self,event=None): if not os.path.exists(self.tmpfile): return self.canvas.Show(False) if self.modified: (root,sfname)=os.path.split(self.params['save_sim_file']) dlg=MessageDialog(self, text="Do you want to save the changes you made to %s?" % sfname, title="Reset", ok=0, yes_no=1,cancel=1) result=dlg.ShowModal() dlg.Destroy() if result == 'cancel': self.canvas.Show(True) return elif result == 'yes': filename=self.Save_Simulation() if not filename: # cancelled the save self.canvas.Show(True) return self.params['continue']=False self.params['load_sim_file']='' self.params['initial_weights']=[] self.params['initial_moments']=[] for a in self.axes: a.cla() self.canvas.draw() self.canvas.Show(True) def Restart(self,event=None): if not os.path.exists(self.tmpfile): return self.canvas.Show(False) if self.modified: (root,sfname)=os.path.split(self.params['save_sim_file']) dlg=MessageDialog(self, text="Do you want to save the changes you made to %s?" 
% sfname, title="Restart", ok=0, yes_no=1,cancel=1) result=dlg.ShowModal() dlg.Destroy() if result == 'cancel': self.canvas.Show(True) return elif result == 'yes': filename=self.Save_Simulation() if not filename: # cancelled the save self.canvas.Show(True) return self.__load_sim__(self.tmpfile) self.params['continue']=False self.canvas.Show(True) def Load_Simulation(self,event=None): self.canvas.Show(False) if self.modified: (root,sfname)=os.path.split(self.params['save_sim_file']) dlg=MessageDialog(self, text="Do you want to save the changes you made to %s?" % sfname, title="Load Simulation", ok=0, yes_no=1,cancel=1) result=dlg.ShowModal() dlg.Destroy() if result == 'cancel': pass elif result == 'yes': self.Save_Simulation() lfname='' dlg = FileDialog(self, "Load Simulation",default_dir=os.getcwd()+"/sims", wildcard='DAT Files|*.dat|All Files|*.*') result = dlg.ShowModal() if result == 'ok': lfname = dlg.GetPaths()[0] dlg.Destroy() if not lfname: self.canvas.Show(True) return self.__load_sim__(lfname) sim=zpickle.load(self.tmpfile) self.Plot(sim) self.canvas.Show(True) def Save_Simulation(self,event=None): if not self.modified: return sfname=self.params['save_sim_file'] def_sfname=default_params()['save_sim_file'] if sfname==def_sfname: filename=self.Save_Simulation_As() else: filename=sfname d=zpickle.load(self.tmpfile) d['params']=self.params zpickle.save(d,sfname) self.modified=False self.ResetTitle() return filename def Save_Simulation_As(self,event=None): self.canvas.Show(False) dlg = FileDialog(self, "Save Simulation As...",default_dir=os.getcwd()+"/sims/", wildcard='DAT Files|*.dat|All Files|*.*',save=1) result = dlg.ShowModal() if result == 'ok': filename = dlg.GetPaths()[0] else: filename=None dlg.Destroy() if filename: d=zpickle.load(self.tmpfile) self.params['save_sim_file']=filename d['params']=self.params zpickle.save(d,filename) self.modified=False self.ResetTitle() self.canvas.Show(True) return filename def Set_Simulation_Parameters(self,event): self.canvas.Show(False) set_simulation_parameters(self.params,self) self.canvas.Show(True) def Set_Input_Parameters(self,event): self.canvas.Show(False) set_input_parameters(self.params,self) self.canvas.Show(True) def Set_Output_Parameters(self,event): self.canvas.Show(False) set_output_parameters(self.params,self) self.canvas.Show(True) def Set_Weight_Parameters(self,event): self.canvas.Show(False) set_weight_parameters(self.params,self) self.canvas.Show(True) def Save_Parameters_As(self,event): save_parameters_as(self.params,self) def Set_Parameter_Structure(self,event): set_parameter_structure(self.params,self) def Load_Parameters(self,event): p=load_parameters(None,self) if p: self.params=p def CreateMenu(self): menubar = MenuBar() menu = Menu(self) menu.Append("L&oad State", self.Load_Simulation, "Load a Complete Simulation",hotkey="Ctrl+O") menu.Append("Load &Parameters", self.Load_Parameters, "Load Simulation Parameters") menu.AppendSeparator() menu.Append("Save Parameters As...", self.Save_Parameters_As, "Save Simulation Parameters") menu.Append("Save State As...", self.Save_Simulation_As, "Save a Complete Simulation") menu.Append("Save State", self.Save_Simulation, "Save a Complete Simulation",hotkey="Ctrl+S") menu.AppendSeparator() menu.Append("&Run/Pause", self.Run_Pause, "Run a Simulation",hotkey="Ctrl+P") menu.Append("Restart from Current State", self.Restart) menu.Append("Reset Simulation", self.Reset_Simulation,hotkey="Ctrl+R") menu.AppendSeparator() menu.Append("Export Figure...", self.Export, "Export the Screen") 
menu.Append("&Quit", self.Quit, "Quit",hotkey="Ctrl+Q") menubar.Append(menu, "&File") menu = Menu(self) menu.Append("&Simulation Parameters", self.Set_Simulation_Parameters) menu.Append("&Input Parameters", self.Set_Input_Parameters) menu.Append("&Output Neuron Parameters", self.Set_Output_Parameters) menu.Append("&Weight Parameters", self.Set_Weight_Parameters) menu.AppendSeparator() menu.Append("&Display", self.Display) menu.Append("Make &New Input Files", self.Nop) menu.Append("Parameter Structure", self.Set_Parameter_Structure) menubar.Append(menu, "&Edit") menu=Menu(self) menu.Append("&Help", self.Nop) menu.Append("&About", self.About) menubar.Append(menu, "&Help") self.SetMenuBar(menubar) self.CreateStatusBar() def Display(self,event=None): self.canvas.Show(False) dlg = FileDialog(self, "Choose Display Module",default_dir=os.getcwd()+"/", wildcard='Python Plot Files|plot*.py|All Files|*.*') result = dlg.ShowModal() dlg.Destroy() if result == 'ok': lfname = dlg.GetPaths()[0] modulename=os.path.splitext(os.path.split(lfname)[-1])[0] self.params['display_module']=modulename if os.path.exists(self.tmpfile): sim=zpickle.load(self.tmpfile) self.Plot(sim) self.canvas.Show(True) def About(self,event): win=AboutWindow() win.Show() def Nop(self,event): self.canvas.Show(False) dlg = MessageDialog(self, "Error","Function Not Implemented",icon='error') dlg.ShowModal() dlg.Destroy() self.canvas.Show(True) def Export(self,event=None): export_fig(self) def Quit(self,event=None): if self.running: self.quitting=True self.stopping=True return self.canvas.Show(False) if self.modified: (root,sfname)=os.path.split(self.params['save_sim_file']) dlg=MessageDialog(self, text="Do you want to save the changes you made to %s?" % sfname, title="Quit", ok=0, yes_no=1,cancel=1) result=dlg.ShowModal() dlg.Destroy() if result == 'cancel': self.canvas.Show(True) return elif result == 'yes': self.Save_Simulation() self.Close() if os.path.exists(self.tmpfile): os.remove(self.tmpfile) def run(lfname=None,params=None,use_splash=True): if use_splash: app1=Application(splash.SplashFrame) app1.Run() app = Application(MainFrame, title="Plasticity",lfname=lfname, params=params) app.Run() if __name__ == '__main__': from optparse import OptionParser parser = OptionParser() parser.add_option( "--nosplash", action="store_false", dest="splash", default=True, help="don't show the splash screen") (options, args) = parser.parse_args() if options.splash: app1=Application(splash.SplashFrame) app1.Run() if len(args)>=1: lfname=args[0] else: lfname=None run(lfname)
bblais/plasticity
plasticity/run.py
Python
mit
17,982
import requests import json def test_api_endpoint_existence(todolist_app): with todolist_app.test_client() as client: resp = client.get('/tasks') assert resp.status_code == 200 def test_task_creation(todolist_app): with todolist_app.test_client() as client: resp = client.jpost( '/tasks', { "title": "First task" } ) assert resp['status'] == 'success' assert 'id' in resp['result'] assert resp['result']['id'] == 1 def test_task_updation(todolist_app): with todolist_app.test_client() as client: modified_title = "First task - modified" resp = client.jput( '/tasks/1', { "title": "First task - modified" } ) assert resp['status'] == 'success' assert 'id' in resp['result'] assert resp['result']['title'] == modified_title
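# These tests expect a pytest fixture named todolist_app whose test client
# exposes JSON helpers such as jpost/jput (presumably added by the project's
# test client). A hypothetical conftest.py sketch -- the create_app factory
# name is an assumption, not the repository's actual code:
import pytest
from todo_list_api import create_app  # hypothetical application factory


@pytest.fixture
def todolist_app():
    app = create_app()
    app.config['TESTING'] = True
    with app.app_context():
        yield app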
inkmonk/flask-sqlalchemy-booster
tests/test_view_boosters/test_todo_list_api/test_api.py
Python
mit
775
######################################## # Automatically generated, do not edit. ######################################## from pyvisdk.thirdparty import Enum EventCategory = Enum( 'error', 'info', 'user', 'warning', )
xuru/pyvisdk
pyvisdk/enums/event_category.py
Python
mit
239
import requests
import csv
from configparser import ConfigParser

config = ConfigParser()
config.read("config.cfg")
token = config.get("auth", "token")
domain = config.get("instance", "domain")
headers = {"Authorization": "Bearer %s" % token}

source_course_id = 311693
csv_file = ""

payload = {'migration_type': 'course_copy_importer',
           'settings[source_course_id]': source_course_id}

with open(csv_file, 'r') as courses:
    coursesreader = csv.reader(courses)
    for course in coursesreader:
        sis_id = course[0]  # first column of each row holds the SIS course id
        uri = domain + "/api/v1/courses/sis_course_id:%s/content_migrations" % sis_id
        r = requests.post(uri, headers=headers, data=payload)
        print("%s %s" % (r.status_code, sis_id))
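# Illustrative inputs for the script above (all values are placeholders; the
# one-SIS-course-id-per-row CSV layout is an assumption based on how each
# row's first column is used):
#
#   # config.cfg
#   [auth]
#   token = <canvas_api_token>
#   [instance]
#   domain = https://canvas.example.edu
#
#   # courses.csv
#   SIS_2024_PHYS101
#   SIS_2024_PHYS102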
tylerclair/canvas_admin_scripts
course_copy_csv.py
Python
mit
684
import math import re from collections import defaultdict def matches(t1, t2): t1r = "".join([t[-1] for t in t1]) t2r = "".join([t[-1] for t in t2]) t1l = "".join([t[0] for t in t1]) t2l = "".join([t[0] for t in t2]) t1_edges = [t1[0], t1[-1], t1r, t1l] t2_edges = [t2[0], t2[-1], t2[0][::-1], t2[-1][::-1], t2l, t2l[::-1], t2r, t2r[::-1]] for et1 in t1_edges: for et2 in t2_edges: if et1 == et2: return True return False def flip(t): return [l[::-1] for l in t] # https://stackoverflow.com/a/34347121 def rotate(t): return [*map("".join, zip(*reversed(t)))] def set_corner(cor, right, down): rr = "".join([t[-1] for t in right]) dr = "".join([t[-1] for t in down]) rl = "".join([t[0] for t in right]) dl = "".join([t[0] for t in down]) r_edges = [right[0], right[-1], right[0][::-1], right[-1][::-1], rr, rr[::-1], rl, rl[::-1]] d_edges = [down[0], down[-1], down[0][::-1], down[-1][::-1], dr, dr[::-1], dl, dl[::-1]] for _ in range(2): cor = flip(cor) for _ in range(4): cor = rotate(cor) if cor[-1] in d_edges and "".join([t[-1] for t in cor]) in r_edges: return cor return None def remove_border(t): return [x[1:-1] for x in t[1:-1]] def set_left_edge(t1, t2): ref = "".join([t[-1] for t in t1]) for _ in range(2): t2 = flip(t2) for _ in range(4): t2 = rotate(t2) if "".join([t[0] for t in t2]) == ref: return t2 return None def set_upper_edge(t1, t2): ref = t1[-1] for _ in range(2): t2 = flip(t2) for _ in range(4): t2 = rotate(t2) if t2[0] == ref: return t2 return None def assemble_image(img, tiles): whole_image = [] for l in img: slice = [""] * len(tiles[l[0]]) for t in l: for i, s in enumerate(tiles[t]): slice[i] += s for s in slice: whole_image.append(s) return whole_image def part1(): tiles = defaultdict(list) for l in open("input.txt"): if "Tile" in l: tile = int(re.findall(r"\d+", l)[0]) elif "." in l or "#" in l: tiles[tile].append(l.strip()) connected = defaultdict(set) for i in tiles: for t in tiles: if i == t: continue if matches(tiles[i], tiles[t]): connected[i].add(t) connected[t].add(i) prod = 1 for i in connected: if len(connected[i]) == 2: prod *= i print(prod) def part2(): tiles = defaultdict(list) for l in open("input.txt"): if "Tile" in l: tile = int(re.findall(r"\d+", l)[0]) elif "." 
in l or "#" in l: tiles[tile].append(l.strip()) connected = defaultdict(set) for i in tiles: for t in tiles: if i == t: continue if matches(tiles[i], tiles[t]): connected[i].add(t) connected[t].add(i) sz = int(math.sqrt(len(connected))) image = [[0 for _ in range(sz)] for _ in range(sz)] for i in connected: if len(connected[i]) == 2: corner = i break image[0][0] = corner added = {corner} for y in range(1, sz): pos = connected[image[0][y - 1]] for cand in pos: if cand not in added and len(connected[cand]) < 4: image[0][y] = cand added.add(cand) break for x in range(1, sz): for y in range(sz): pos = connected[image[x - 1][y]] for cand in pos: if cand not in added: image[x][y] = cand added.add(cand) break tiles[image[0][0]] = set_corner(tiles[image[0][0]], tiles[image[0][1]], tiles[image[1][0]]) for y, l in enumerate(image): if y != 0: prv = image[y - 1][0] tiles[l[0]] = set_upper_edge(tiles[prv], tiles[l[0]]) for x, tile in enumerate(l): if x != 0: prv = image[y][x - 1] tiles[tile] = set_left_edge(tiles[prv], tiles[tile]) for t in tiles: tiles[t] = remove_border(tiles[t]) image = assemble_image(image, tiles) ky = 0 monster = set() for l in open("monster.txt").read().split("\n"): kx = len(l) for i, ch in enumerate(l): if ch == "#": monster.add((i, ky)) ky += 1 for _ in range(2): image = flip(image) for _ in range(4): image = rotate(image) for x in range(0, len(image) - kx): for y in range(0, len(image) - ky): parts = [] for i, p in enumerate(monster): dx = x + p[0] dy = y + p[1] parts.append(image[dy][dx] == "#") if all(parts): for p in monster: dx = x + p[0] dy = y + p[1] image[dy] = image[dy][:dx] + "O" + image[dy][dx + 1 :] with open("output.txt", "w+") as f: for l in rotate(rotate(rotate(image))): f.write(l + "\n") print(sum([l.count("#") for l in image])) if __name__ == "__main__": part1() part2()
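# Quick worked check of the orientation helpers defined above (relies on the
# flip() and rotate() functions in this file): rotate() turns the grid 90
# degrees clockwise and flip() mirrors each row.
tile = ["ab",
        "cd"]
assert flip(tile) == ["ba", "dc"]
assert rotate(tile) == ["ca", "db"]
assert rotate(rotate(rotate(rotate(tile)))) == tile  # four rotations restore the tile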
BrendanLeber/adventofcode
2020/20-jurassic_jigsaw/code.py
Python
mit
5,544
def is_isogram(s): """ Determine if a word or phrase is an isogram. An isogram (also known as a "nonpattern word") is a word or phrase without a repeating letter. Examples of isograms: - lumberjacks - background - downstream """ from collections import Counter s = s.lower().strip() s = [c for c in s if c.isalpha()] counts = Counter(s).values() return max(counts or [1]) == 1
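# Example behaviour of is_isogram(), matching the docstring above: comparison
# is case-insensitive and non-letter characters are ignored.
assert is_isogram("lumberjacks") is True
assert is_isogram("six-year-old") is True       # hyphens are ignored
assert is_isogram("isograms") is False          # repeated 's'
assert is_isogram("") is True                   # empty input counts as an isogram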
developerQuinnZ/this_will_work
student-work/hobson_lane/exercism/python/isogram/isogram.py
Python
mit
428
import argparse from PGEnv import PGEnvironment from PGAgent import PGAgent if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--gym_environment', type=str, default='Pong-v0', help='OpenAI Gym Environment to be used (default to Pong-v0)') parser.add_argument('--mode', type=str, default='train', choices=['train', 'test'], help='running mode (default to train)') parser.add_argument('--use_gpu', type=bool, default=False, help='whether to use GPU (default to True)') parser.add_argument('--gpu_id', type=int, default=0, help='the id of the GPU to be used (default to 0)') parser.add_argument('--model_save_path', type=str, default='./model/PG_model.ckpt', help='path to save/load the model for training/testing (default to model/PG_model.ckpt)') parser.add_argument('--check_point', type=int, default=None, help='index of the ckeck point (default to None)') parser.add_argument('--model_save_freq', type=int, default=100, help='dump model at every k-th iteration (default to 100)') parser.add_argument('--display', type=bool, default=False, help='whether to render to result. (default to False)') args = parser.parse_args() if args.mode == 'train': env = PGEnvironment(environment_name=args.gym_environment, display=args.display) agent = PGAgent(env) assert(args.model_save_path is not None) agent.learn(model_save_frequency=args.model_save_freq, model_save_path=args.model_save_path, check_point = args.check_point, use_gpu=args.use_gpu, gpu_id=args.gpu_id) else: # disable frame skipping during testing result in better performance (because the agent can take more actions) env = PGEnvironment(environment_name=args.gym_environment, display=args.display, frame_skipping=False) agent = PGAgent(env) assert(args.check_point is not None) agent.test(model_save_path = args.model_save_path, check_point=args.check_point, use_gpu=args.use_gpu, gpu_id=args.gpu_id) print('finished.')
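# Command-line usage sketch for the entry point above (paths and values are
# illustrative). Note that argparse's type=bool converts any non-empty string
# to True, so "--use_gpu False" would still enable the GPU branch; only pass a
# boolean option when you intend it to be set.
#
#   python main.py --mode train --gym_environment Pong-v0 \
#       --model_save_path ./model/PG_model.ckpt --model_save_freq 100
#
#   python main.py --mode test --check_point 500 --display True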
YuMao1993/DRL
PG/main.py
Python
mit
2,270
from feature import * from pymongo import MongoClient from bson.binary import Binary as BsonBinary import pickle import os from operator import itemgetter import time import sys imagelocation = "" #Input Image path indir = "" #Directory Path client = MongoClient('mongodb://localhost:27017') db = client.coil #Insert your database in place of coil col = db.images #Insert your collection in place of images class Image(object): """docstring for Image""" def __init__(self, path): self.path = path img = cv2.imread(self.path,0) imgm = preprocess(img) segm = segment(imgm) self.glfeature = globalfeature(imgm,16) self.llfeature = localfeature(segm) self.numberofones = self.glfeature.sum(dtype=int) start_time = time.time() count = 0 for root, dirs, filenames in os.walk(indir): for f in filenames: i1 = Image(f) count = count+1 perc = (count/360) * 100 sys.stdout.write("\r%d%%" % perc) sys.stdout.flush() new_posts = [{'path': i1.path, 'llfeature': BsonBinary(pickle.dumps(i1.llfeature,protocol=2)), 'glfeature': BsonBinary(pickle.dumps(i1.glfeature,protocol=2)), 'numberofones' : int(i1.numberofones)}] post_id = col.insert(new_posts) # print(post_id) img = Image(imagelocation) count = 0 maxglosim = 0 maxlocsim = 0 maximum = 0 gridmax=0 vectormax=0 for f in col.find(): llfeature = pickle.loads(f['llfeature']) glfeature = pickle.loads(f['glfeature']) count = count+1 perc = (count/360) * 100 sys.stdout.write("\r%d%%" % perc) sys.stdout.flush() locsim = np.absolute((llfeature-img.llfeature).sum()) glosim = np.logical_xor(glfeature,img.glfeature).sum() distance = locsim+glosim if(glosim>maxglosim): gridmax = glfeature maxglosim=glosim if(locsim>maxlocsim): maxlocsim=locsim vectormax = llfeature if(distance>maximum): vectmostdif= llfeature gridmostdif = glfeature maximum = distance maxilocsim = np.absolute((vectormax-img.llfeature).sum()) maxiglosim = np.logical_xor(gridmax,img.glfeature).sum() processed_time = time.time() print("\nTotal Processing Time : {0:.2f} seconds".format(processed_time-start_time)) def gloDist(gridA,gridB): glosim = np.logical_xor(gridA,gridB).sum() return glosim/maxiglosim def locDist(vectorA,vectorB): locsim = np.absolute((vectorA-vectorB).sum()) return locsim/maxilocsim ranking = [] count = 0 print("\nSearching:") for f in col.find(): llfeature = pickle.loads(f['llfeature']) glfeature = pickle.loads(f['glfeature']) count = count+1 perc = (count/360) * 100 sys.stdout.write("\r%d%%" % perc) sys.stdout.flush() g1 = gloDist(glfeature,img.glfeature) l1 = locDist(llfeature,img.llfeature) sim = ((2-(g1+l1))/2)*100 ranking.append([sim,f['path']]) search_time = time.time() print("\nTotal Searching Time : {0:.2f} seconds".format(search_time-processed_time)) print("\nTotal Time : {0:.2f} seconds".format(search_time-start_time)) ranking = sorted(ranking, key=itemgetter(0),reverse=True) #Ranking : Results in a list
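# The indexing walk above passes bare file names to Image(), which only works
# when the process is started inside indir. A minimal sketch of the same loop
# with the directory joined onto each name (an adjustment, not the original
# code); the rest of the loop body is unchanged.
for root, dirs, filenames in os.walk(indir):
    for f in filenames:
        i1 = Image(os.path.join(root, f))   # full path instead of bare file name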
devashishp/Content-Based-Image-Retrieval
optimized.py
Python
mit
3,209
#PROJECT from outcome import Outcome from odds import Odds class Bin: def __init__( self, *outcomes ): self.outcomes = set([outcome for outcome in outcomes]) def add_outcome( self, outcome ): self.outcomes.add(outcome) def __str__(self): return ', '.join([str(outcome) for outcome in self.outcomes]) class BinBuilder: def __init__( self, wheel ): self.wheel = wheel def build_bins(self): self.straight_bets() self.split_bets() self.street_bets() self.corner_bets() self.five_bet() self.line_bets() self.dozen_bets() self.column_bets() self.even_money_bets() def straight_bets(self): outcomes = [ Outcome(str(i), Odds.STRAIGHT_BET) for i in range(37) ] + [Outcome('00', Odds.STRAIGHT_BET)] for i, outcome in enumerate(outcomes): self.wheel.add_outcome(i, outcome) def split_bets(self): for row in range(12): for direction in [1, 2]: n = 3 * row + direction bins = [n, n + 1] outcome = Outcome( 'split {}'.format('-'.join([str(i) for i in bins])), Odds.SPLIT_BET ) for bin in bins: self.wheel.add_outcome(bin, outcome) for n in range(1, 34): bins = [n, n + 3] outcome = Outcome( 'split {}'.format('-'.join([str(i) for i in bins])), Odds.SPLIT_BET ) for bin in bins: self.wheel.add_outcome(bin, outcome) def street_bets(self): for row in range(12): n = 3 * row + 1 bins = [n, n + 1, n + 2] outcome = Outcome( 'street {}-{}'.format(bins[0], bins[-1]), Odds.STREET_BET ) for bin in bins: self.wheel.add_outcome(bin, outcome) def corner_bets(self): for col in [1, 2]: for row in range(11): n = 3 * row + col bins = [n + i for i in [0, 1, 3, 4]] outcome = Outcome( 'corner {}'.format('-'.join([str(i) for i in bins])), Odds.CORNER_BET ) for bin in bins: self.wheel.add_outcome(bin, outcome) def five_bet(self): outcome = Outcome( 'five bet 00-0-1-2-3', Odds.FIVE_BET ) for bin in [0, 1, 2, 3, 37]: self.wheel.add_outcome(bin, outcome) def line_bets(self): for row in range(11): n = 3 * row + 1 bins = [n + i for i in range(6)] outcome = Outcome( 'line {}-{}'.format(bins[0], bins[-1]), Odds.LINE_BET ) for bin in bins: self.wheel.add_outcome(bin, outcome) def dozen_bets(self): #https://pypi.python.org/pypi/inflect/0.2.4 dozen_map = { 1: '1st', 2: '2nd', 3: '3rd' } for d in range(3): outcome = Outcome( '{} 12'.format(dozen_map[d + 1]), Odds.DOZEN_BET ) for m in range(12): self.wheel.add_outcome(12 * d + m + 1, outcome) def column_bets(self): for c in range(3): outcome = Outcome( 'column {}'.format(c + 1), Odds.COLUMN_BET ) for r in range(12): self.wheel.add_outcome(3 * r + c + 1, outcome) def even_money_bets(self): for bin in range(1, 37): if 1 <= bin < 19: name = '1 to 18' #low else: name = '19 to 36' #high self.wheel.add_outcome( bin, Outcome(name, Odds.EVEN_MONEY_BET) ) if bin % 2: name = 'odd' else: name = 'even' self.wheel.add_outcome( bin, Outcome(name, Odds.EVEN_MONEY_BET) ) if bin in ( [1, 3, 5, 7, 9] + [12, 14, 16, 18] + [19, 21, 23, 25, 27] + [30, 32, 34, 36] ): name = 'red' else: name = 'black' self.wheel.add_outcome( bin, Outcome(name, Odds.EVEN_MONEY_BET) )
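# BinBuilder only requires the wheel to expose add_outcome(number, outcome), so
# a minimal stand-in wheel (a sketch -- the project's real Wheel class is not
# shown in this file) is enough to exercise build_bins():
class _StubWheel:
    def __init__(self):
        # bins 0-36 plus index 37 for '00'
        self.bins = {i: Bin() for i in range(38)}

    def add_outcome(self, number, outcome):
        self.bins[number].add_outcome(outcome)


wheel = _StubWheel()
BinBuilder(wheel).build_bins()
print(wheel.bins[0])   # the outcomes that pay on zero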
ddenhartog/itmaybeahack-roulette
bin.py
Python
mit
4,662
#!/usr/bin/env python # -*- coding: utf8 -*- """The Tornado web framework. 核心模块, 参考示例使用代码: - 重要模块: - tornado.web - tornado.ioloop # 根据示例,可知入口在此.参看: ioloop.py - tornado.httpserver The Tornado web framework looks a bit like web.py (http://webpy.org/) or Google's webapp (http://code.google.com/appengine/docs/python/tools/webapp/), but with additional tools and optimizations to take advantage of the Tornado non-blocking web server and tools. Here is the canonical "Hello, world" example app: import tornado.httpserver import tornado.ioloop import tornado.web class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello, world") if __name__ == "__main__": application = tornado.web.Application([ (r"/", MainHandler), ]) http_server = tornado.httpserver.HTTPServer(application) http_server.listen(8888) tornado.ioloop.IOLoop.instance().start() See the Tornado walkthrough on GitHub for more details and a good getting started guide. """ import base64 import binascii import calendar import Cookie import cStringIO import datetime import email.utils import escape import functools import gzip import hashlib import hmac import httplib import locale import logging import mimetypes import os.path import re import stat import sys import template import time import types import urllib import urlparse import uuid """ # 模块说明: 核心模块 RequestHandler() 需要处理哪些工作: - 1. HTTP方法支持(GET,POST, HEAD, DELETE, PUT), 预定义各种接口 - 2. 预定义接口: 配对定义[类似 unittest 的 setUp(), tearDown() 方法] - prepare() # 运行前, 准备工作 - on_connection_close() # 运行后, 清理工作 - 根据需要, 选择使用 - 3. cookies处理: - set - get - clear - 4. HTTP头处理: - set_status() # 状态码 - set_header() # 头信息 - 5. 重定向: - redirect() """ class RequestHandler(object): """Subclass this class and define get() or post() to make a handler. If you want to support more methods than the standard GET/HEAD/POST, you should override the class variable SUPPORTED_METHODS in your RequestHandler class. 译: 1. 继承此类,并自定义get(), post()方法,创建 handler 2. 若需要支持更多方法(GET/HEAD/POST), 需要 在 子类中 覆写 类变量 SUPPORTED_METHODS """ SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PUT") def __init__(self, application, request, transforms=None): self.application = application self.request = request self._headers_written = False self._finished = False self._auto_finish = True self._transforms = transforms or [] self.ui = _O((n, self._ui_method(m)) for n, m in application.ui_methods.iteritems()) self.ui["modules"] = _O((n, self._ui_module(n, m)) for n, m in application.ui_modules.iteritems()) self.clear() # Check since connection is not available in WSGI if hasattr(self.request, "connection"): self.request.connection.stream.set_close_callback( self.on_connection_close) # 注意 self.on_connection_close() 调用时机 @property def settings(self): return self.application.settings # 如下这部分, 默认的接口定义, 如果子类没有覆写这些方法,就直接抛出异常. # 也就是说: 这些接口, 必须要 覆写,才可以用 def head(self, *args, **kwargs): raise HTTPError(405) def get(self, *args, **kwargs): raise HTTPError(405) def post(self, *args, **kwargs): raise HTTPError(405) def delete(self, *args, **kwargs): raise HTTPError(405) def put(self, *args, **kwargs): raise HTTPError(405) # 预定义接口: 准备工作函数, 给需要 个性化配置用 # 注意调用时机: self._execute() def prepare(self): """Called before the actual handler method. Useful to override in a handler if you want a common bottleneck for all of your requests. """ pass # 预定义接口2: 执行完后, 附带清理工作.(根据需要自行修改) # 注意调用时机: __init__() def on_connection_close(self): """Called in async handlers if the client closed the connection. 
You may override this to clean up resources associated with long-lived connections. Note that the select()-based implementation of IOLoop does not detect closed connections and so this method will not be called until you try (and fail) to produce some output. The epoll- and kqueue- based implementations should detect closed connections even while the request is idle. """ pass def clear(self): """Resets all headers and content for this response.""" self._headers = { "Server": "TornadoServer/1.0", "Content-Type": "text/html; charset=UTF-8", } if not self.request.supports_http_1_1(): if self.request.headers.get("Connection") == "Keep-Alive": self.set_header("Connection", "Keep-Alive") self._write_buffer = [] self._status_code = 200 # 设置 HTTP状态码 def set_status(self, status_code): """Sets the status code for our response.""" assert status_code in httplib.responses # 使用 assert 方式 作条件判断, 出错时,直接抛出 self._status_code = status_code # 设置 HTTP头信息 # 根据 value 类型, 作 格式转换处理 def set_header(self, name, value): """Sets the given response header name and value. If a datetime is given, we automatically format it according to the HTTP specification. If the value is not a string, we convert it to a string. All header values are then encoded as UTF-8. """ if isinstance(value, datetime.datetime): t = calendar.timegm(value.utctimetuple()) value = email.utils.formatdate(t, localtime=False, usegmt=True) elif isinstance(value, int) or isinstance(value, long): value = str(value) else: value = _utf8(value) # If \n is allowed into the header, it is possible to inject # additional headers or split the request. Also cap length to # prevent obviously erroneous values. safe_value = re.sub(r"[\x00-\x1f]", " ", value)[:4000] # 正则过滤 + 截取4000长度字符串 if safe_value != value: raise ValueError("Unsafe header value %r", value) self._headers[name] = value _ARG_DEFAULT = [] def get_argument(self, name, default=_ARG_DEFAULT, strip=True): """Returns the value of the argument with the given name. If default is not provided, the argument is considered to be required, and we throw an HTTP 404 exception if it is missing. If the argument appears in the url more than once, we return the last value. The returned value is always unicode. """ args = self.get_arguments(name, strip=strip) if not args: if default is self._ARG_DEFAULT: raise HTTPError(404, "Missing argument %s" % name) return default return args[-1] def get_arguments(self, name, strip=True): """Returns a list of the arguments with the given name. If the argument is not present, returns an empty list. The returned values are always unicode. 
""" values = self.request.arguments.get(name, []) # Get rid of any weird control chars values = [re.sub(r"[\x00-\x08\x0e-\x1f]", " ", x) for x in values] values = [_unicode(x) for x in values] if strip: values = [x.strip() for x in values] return values @property def cookies(self): """A dictionary of Cookie.Morsel objects.""" # 如果不存在,定义cookies # 如果存在, 返回之 if not hasattr(self, "_cookies"): self._cookies = Cookie.BaseCookie() # 定义 if "Cookie" in self.request.headers: try: self._cookies.load(self.request.headers["Cookie"]) # 赋值 except: self.clear_all_cookies() # 异常时,调用 自定义清理函数 return self._cookies def get_cookie(self, name, default=None): """Gets the value of the cookie with the given name, else default.""" if name in self.cookies: # 注意, 因为 cookies() 被定义成 property, 可以直接这样调用 return self.cookies[name].value return default def set_cookie(self, name, value, domain=None, expires=None, path="/", expires_days=None, **kwargs): """Sets the given cookie name/value with the given options. Additional keyword arguments are set on the Cookie.Morsel directly. See http://docs.python.org/library/cookie.html#morsel-objects for available attributes. """ name = _utf8(name) value = _utf8(value) if re.search(r"[\x00-\x20]", name + value): # Don't let us accidentally inject bad stuff raise ValueError("Invalid cookie %r: %r" % (name, value)) if not hasattr(self, "_new_cookies"): self._new_cookies = [] new_cookie = Cookie.BaseCookie() self._new_cookies.append(new_cookie) new_cookie[name] = value if domain: new_cookie[name]["domain"] = domain if expires_days is not None and not expires: expires = datetime.datetime.utcnow() + datetime.timedelta( days=expires_days) if expires: timestamp = calendar.timegm(expires.utctimetuple()) new_cookie[name]["expires"] = email.utils.formatdate( timestamp, localtime=False, usegmt=True) if path: new_cookie[name]["path"] = path for k, v in kwargs.iteritems(): new_cookie[name][k] = v def clear_cookie(self, name, path="/", domain=None): """Deletes the cookie with the given name.""" expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) # 赋空值, 清掉 cookie, 多个web框架,标准实现写法 self.set_cookie(name, value="", path=path, expires=expires, domain=domain) def clear_all_cookies(self): """Deletes all the cookies the user sent with this request.""" # 注: 注意如上2个相关函数 命名特征 # - 单个操作: clear_cookie() # - 批量操作: clear_all_cookies() for name in self.cookies.iterkeys(): self.clear_cookie(name) def set_secure_cookie(self, name, value, expires_days=30, **kwargs): """Signs and timestamps a cookie so it cannot be forged. You must specify the 'cookie_secret' setting in your Application to use this method. It should be a long, random sequence of bytes to be used as the HMAC secret for the signature. To read a cookie set with this method, use get_secure_cookie(). """ # 如下几步, 构造 "安全的cookie", 加 时间戳, 防伪造 timestamp = str(int(time.time())) value = base64.b64encode(value) signature = self._cookie_signature(name, value, timestamp) # 加时间戳 value = "|".join([value, timestamp, signature]) self.set_cookie(name, value, expires_days=expires_days, **kwargs) def get_secure_cookie(self, name, include_name=True, value=None): """Returns the given signed cookie if it validates, or None. In older versions of Tornado (0.1 and 0.2), we did not include the name of the cookie in the cookie signature. To read these old-style cookies, pass include_name=False to this method. Otherwise, all attempts to read old-style cookies will fail (and you may log all your users out whose cookies were written with a previous Tornado version). 
""" if value is None: value = self.get_cookie(name) if not value: return None parts = value.split("|") if len(parts) != 3: return None if include_name: signature = self._cookie_signature(name, parts[0], parts[1]) else: signature = self._cookie_signature(parts[0], parts[1]) if not _time_independent_equals(parts[2], signature): logging.warning("Invalid cookie signature %r", value) return None timestamp = int(parts[1]) if timestamp < time.time() - 31 * 86400: logging.warning("Expired cookie %r", value) return None # 尝试返回 try: return base64.b64decode(parts[0]) except: return None def _cookie_signature(self, *parts): self.require_setting("cookie_secret", "secure cookies") hash = hmac.new(self.application.settings["cookie_secret"], digestmod=hashlib.sha1) for part in parts: hash.update(part) return hash.hexdigest() # 关键代码: 重定向 # def redirect(self, url, permanent=False): """Sends a redirect to the given (optionally relative) URL.""" if self._headers_written: raise Exception("Cannot redirect after headers have been written") self.set_status(301 if permanent else 302) # Remove whitespace url = re.sub(r"[\x00-\x20]+", "", _utf8(url)) self.set_header("Location", urlparse.urljoin(self.request.uri, url)) self.finish() # 调用处理 # 关键代码: 准备 渲染页面的 数据, 常用接口函数 # 特别说明: # - 这里 write() 方法, 并没有直接 渲染页面, 而是在 准备 渲染数据 # - 实际的 渲染HTML页面操作, 在 finish() 中 def write(self, chunk): """Writes the given chunk to the output buffer. To write the output to the network, use the flush() method below. If the given chunk is a dictionary, we write it as JSON and set the Content-Type of the response to be text/javascript. """ assert not self._finished if isinstance(chunk, dict): chunk = escape.json_encode(chunk) self.set_header("Content-Type", "text/javascript; charset=UTF-8") chunk = _utf8(chunk) self._write_buffer.append(chunk) # 准备 待渲染的 HTML数据 # 关键代码: 渲染页面 # def render(self, template_name, **kwargs): """Renders the template with the given arguments as the response.""" html = self.render_string(template_name, **kwargs) # Insert the additional JS and CSS added by the modules on the page js_embed = [] js_files = [] css_embed = [] css_files = [] html_heads = [] html_bodies = [] for module in getattr(self, "_active_modules", {}).itervalues(): # JS 部分 embed_part = module.embedded_javascript() if embed_part: js_embed.append(_utf8(embed_part)) file_part = module.javascript_files() if file_part: if isinstance(file_part, basestring): js_files.append(file_part) else: js_files.extend(file_part) # CSS 部分 embed_part = module.embedded_css() if embed_part: css_embed.append(_utf8(embed_part)) file_part = module.css_files() if file_part: if isinstance(file_part, basestring): css_files.append(file_part) else: css_files.extend(file_part) # Header 部分 head_part = module.html_head() if head_part: html_heads.append(_utf8(head_part)) body_part = module.html_body() if body_part: html_bodies.append(_utf8(body_part)) # ---------------------------------------------------------- # 如下是 分块处理部分: # - 本质工作: 在 拼接一个 长 HTML 字符串(包含 HTML,CSS,JS) # ---------------------------------------------------------- if js_files: # Maintain order of JavaScript files given by modules paths = [] unique_paths = set() for path in js_files: if not path.startswith("/") and not path.startswith("http:"): path = self.static_url(path) if path not in unique_paths: paths.append(path) unique_paths.add(path) js = ''.join('<script src="' + escape.xhtml_escape(p) + '" type="text/javascript"></script>' for p in paths) sloc = html.rindex('</body>') html = html[:sloc] + js + '\n' + html[sloc:] if 
js_embed: js = '<script type="text/javascript">\n//<![CDATA[\n' + \ '\n'.join(js_embed) + '\n//]]>\n</script>' sloc = html.rindex('</body>') html = html[:sloc] + js + '\n' + html[sloc:] if css_files: paths = set() for path in css_files: if not path.startswith("/") and not path.startswith("http:"): paths.add(self.static_url(path)) else: paths.add(path) css = ''.join('<link href="' + escape.xhtml_escape(p) + '" ' 'type="text/css" rel="stylesheet"/>' for p in paths) hloc = html.index('</head>') html = html[:hloc] + css + '\n' + html[hloc:] if css_embed: css = '<style type="text/css">\n' + '\n'.join(css_embed) + \ '\n</style>' hloc = html.index('</head>') html = html[:hloc] + css + '\n' + html[hloc:] if html_heads: hloc = html.index('</head>') html = html[:hloc] + ''.join(html_heads) + '\n' + html[hloc:] if html_bodies: hloc = html.index('</body>') html = html[:hloc] + ''.join(html_bodies) + '\n' + html[hloc:] # 注意 self.finish(html) # 关键调用 def render_string(self, template_name, **kwargs): """Generate the given template with the given arguments. We return the generated string. To generate and write a template as a response, use render() above. """ # If no template_path is specified, use the path of the calling file template_path = self.get_template_path() if not template_path: frame = sys._getframe(0) web_file = frame.f_code.co_filename while frame.f_code.co_filename == web_file: frame = frame.f_back template_path = os.path.dirname(frame.f_code.co_filename) if not getattr(RequestHandler, "_templates", None): RequestHandler._templates = {} if template_path not in RequestHandler._templates: loader = self.application.settings.get("template_loader") or\ template.Loader(template_path) RequestHandler._templates[template_path] = loader # 注意 t = RequestHandler._templates[template_path].load(template_name) args = dict( handler=self, request=self.request, current_user=self.current_user, locale=self.locale, _=self.locale.translate, static_url=self.static_url, xsrf_form_html=self.xsrf_form_html, reverse_url=self.application.reverse_url ) args.update(self.ui) args.update(kwargs) return t.generate(**args) def flush(self, include_footers=False): """Flushes the current output buffer to the nextwork.""" if self.application._wsgi: raise Exception("WSGI applications do not support flush()") chunk = "".join(self._write_buffer) self._write_buffer = [] if not self._headers_written: self._headers_written = True for transform in self._transforms: self._headers, chunk = transform.transform_first_chunk( self._headers, chunk, include_footers) headers = self._generate_headers() else: for transform in self._transforms: chunk = transform.transform_chunk(chunk, include_footers) headers = "" # Ignore the chunk and only write the headers for HEAD requests if self.request.method == "HEAD": if headers: self.request.write(headers) # 特别注意 self.request.write() 方法 return if headers or chunk: self.request.write(headers + chunk) # 超级关键代码: 写HTML页面 # # def finish(self, chunk=None): """Finishes this response, ending the HTTP request.""" assert not self._finished if chunk is not None: self.write(chunk) # 特别注意, 这里的关键调用 # Automatically support ETags and add the Content-Length header if # we have not flushed any content yet. 
if not self._headers_written: if (self._status_code == 200 and self.request.method == "GET" and "Etag" not in self._headers): hasher = hashlib.sha1() for part in self._write_buffer: hasher.update(part) etag = '"%s"' % hasher.hexdigest() inm = self.request.headers.get("If-None-Match") if inm and inm.find(etag) != -1: self._write_buffer = [] self.set_status(304) else: self.set_header("Etag", etag) if "Content-Length" not in self._headers: content_length = sum(len(part) for part in self._write_buffer) self.set_header("Content-Length", content_length) if hasattr(self.request, "connection"): # Now that the request is finished, clear the callback we # set on the IOStream (which would otherwise prevent the # garbage collection of the RequestHandler when there # are keepalive connections) self.request.connection.stream.set_close_callback(None) if not self.application._wsgi: self.flush(include_footers=True) self.request.finish() # 注意调用 self._log() self._finished = True # 给浏览器,返回 内部错误 def send_error(self, status_code=500, **kwargs): """Sends the given HTTP error code to the browser. We also send the error HTML for the given error code as returned by get_error_html. Override that method if you want custom error pages for your application. """ if self._headers_written: logging.error("Cannot send error response after headers written") if not self._finished: self.finish() return self.clear() self.set_status(status_code) message = self.get_error_html(status_code, **kwargs) self.finish(message) # 写出信息 def get_error_html(self, status_code, **kwargs): """Override to implement custom error pages. If this error was caused by an uncaught exception, the exception object can be found in kwargs e.g. kwargs['exception'] """ return "<html><title>%(code)d: %(message)s</title>" \ "<body>%(code)d: %(message)s</body></html>" % { "code": status_code, "message": httplib.responses[status_code], } # 本地配置: 通常用于设置 国际化-语言 (浏览器语言) # @property def locale(self): """The local for the current session. Determined by either get_user_locale, which you can override to set the locale based on, e.g., a user preference stored in a database, or get_browser_locale, which uses the Accept-Language header. """ if not hasattr(self, "_locale"): self._locale = self.get_user_locale() # 配置为 用户设置 if not self._locale: self._locale = self.get_browser_locale() # 配置为 浏览器默认设置 assert self._locale return self._locale # 预定义接口 - 用户配置 # - 使用前, 需覆写该函数 def get_user_locale(self): """Override to determine the locale from the authenticated user. If None is returned, we use the Accept-Language header. """ return None # 默认浏览器设置语言环境 def get_browser_locale(self, default="en_US"): """Determines the user's locale from Accept-Language header. See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4 """ if "Accept-Language" in self.request.headers: languages = self.request.headers["Accept-Language"].split(",") locales = [] for language in languages: parts = language.strip().split(";") if len(parts) > 1 and parts[1].startswith("q="): try: score = float(parts[1][2:]) except (ValueError, TypeError): score = 0.0 else: score = 1.0 locales.append((parts[0], score)) if locales: locales.sort(key=lambda (l, s): s, reverse=True) codes = [l[0] for l in locales] return locale.get(*codes) return locale.get(default) # 获取当前用户 @property def current_user(self): """The authenticated user for this request. Determined by either get_current_user, which you can override to set the user based on, e.g., a cookie. If that method is not overridden, this method always returns None. 
We lazy-load the current user the first time this method is called and cache the result after that. """ if not hasattr(self, "_current_user"): self._current_user = self.get_current_user() return self._current_user # 预定义接口 - 获取当前用户 # - 使用前, 需覆写 # - 特别说明: 通常都需要用到该接口, 基本上一定是需要 覆写的 def get_current_user(self): """Override to determine the current user from, e.g., a cookie.""" return None # ---------------------------------------------------- # 如下2个函数, 用于获取 默认配置参数 # - 登录 URL # - 模板路径 # - 支持 # ---------------------------------------------------- def get_login_url(self): """Override to customize the login URL based on the request. By default, we use the 'login_url' application setting. """ self.require_setting("login_url", "@tornado.web.authenticated") return self.application.settings["login_url"] def get_template_path(self): """Override to customize template path for each handler. By default, we use the 'template_path' application setting. Return None to load templates relative to the calling file. """ return self.application.settings.get("template_path") # 预防 跨站攻击 # # - 默认先判断是否记录了 token # - 若已记录, 直接返回 # - 若未记录, 尝试从 cookie 中 获取 # - 若 cookie 中 存在, 从 cookie 中获取,并返回 # - 若 cookie 中 不存在, 主动生成 token, 并同步写入 cookie. (目的是,无需重复生成) # @property def xsrf_token(self): """The XSRF-prevention token for the current user/session. To prevent cross-site request forgery, we set an '_xsrf' cookie and include the same '_xsrf' value as an argument with all POST requests. If the two do not match, we reject the form submission as a potential forgery. See http://en.wikipedia.org/wiki/Cross-site_request_forgery """ if not hasattr(self, "_xsrf_token"): token = self.get_cookie("_xsrf") # cookie 中获取 if not token: token = binascii.b2a_hex(uuid.uuid4().bytes) # token 生成方法 expires_days = 30 if self.current_user else None # token 有效期 self.set_cookie("_xsrf", token, expires_days=expires_days) # 更新 cookie self._xsrf_token = token # 更新 token return self._xsrf_token def check_xsrf_cookie(self): """Verifies that the '_xsrf' cookie matches the '_xsrf' argument. To prevent cross-site request forgery, we set an '_xsrf' cookie and include the same '_xsrf' value as an argument with all POST requests. If the two do not match, we reject the form submission as a potential forgery. See http://en.wikipedia.org/wiki/Cross-site_request_forgery """ if self.request.headers.get("X-Requested-With") == "XMLHttpRequest": return token = self.get_argument("_xsrf", None) if not token: raise HTTPError(403, "'_xsrf' argument missing from POST") if self.xsrf_token != token: raise HTTPError(403, "XSRF cookie does not match POST argument") # 提交表单 - 预防 xsrf 攻击方法 def xsrf_form_html(self): """An HTML <input/> element to be included with all POST forms. It defines the _xsrf input value, which we check on all POST requests to prevent cross-site request forgery. If you have set the 'xsrf_cookies' application setting, you must include this HTML within all of your HTML forms. See check_xsrf_cookie() above for more information. """ # 特别注意: 该 <表单提交> HTML字符串, 要含有 (name="_xsrf") 字段 return '<input type="hidden" name="_xsrf" value="' + \ escape.xhtml_escape(self.xsrf_token) + '"/>' # 静态资源路径 def static_url(self, path): """Returns a static URL for the given relative static file path. This method requires you set the 'static_path' setting in your application (which specifies the root directory of your static files). We append ?v=<signature> to the returned URL, which makes our static file handler set an infinite expiration header on the returned content. 
The signature is based on the content of the file. If this handler has a "include_host" attribute, we include the full host for every static URL, including the "http://". Set this attribute for handlers whose output needs non-relative static path names. """ self.require_setting("static_path", "static_url") if not hasattr(RequestHandler, "_static_hashes"): RequestHandler._static_hashes = {} hashes = RequestHandler._static_hashes if path not in hashes: try: f = open(os.path.join( self.application.settings["static_path"], path)) hashes[path] = hashlib.md5(f.read()).hexdigest() f.close() except: logging.error("Could not open static file %r", path) hashes[path] = None base = self.request.protocol + "://" + self.request.host \ if getattr(self, "include_host", False) else "" static_url_prefix = self.settings.get('static_url_prefix', '/static/') if hashes.get(path): return base + static_url_prefix + path + "?v=" + hashes[path][:5] else: return base + static_url_prefix + path # 异步回调 def async_callback(self, callback, *args, **kwargs): """Wrap callbacks with this if they are used on asynchronous requests. Catches exceptions and properly finishes the request. """ if callback is None: return None if args or kwargs: callback = functools.partial(callback, *args, **kwargs) def wrapper(*args, **kwargs): try: return callback(*args, **kwargs) except Exception, e: if self._headers_written: logging.error("Exception after headers written", exc_info=True) else: self._handle_request_exception(e) return wrapper def require_setting(self, name, feature="this feature"): """Raises an exception if the given app setting is not defined.""" if not self.application.settings.get(name): raise Exception("You must define the '%s' setting in your " "application to use %s" % (name, feature)) def reverse_url(self, name, *args): return self.application.reverse_url(name, *args) # 关键代码: # def _execute(self, transforms, *args, **kwargs): """Executes this request with the given output transforms.""" self._transforms = transforms try: if self.request.method not in self.SUPPORTED_METHODS: raise HTTPError(405) # If XSRF cookies are turned on, reject form submissions without # the proper cookie if self.request.method == "POST" and \ self.application.settings.get("xsrf_cookies"): self.check_xsrf_cookie() # 检查 self.prepare() # 注意调用时机 if not self._finished: getattr(self, self.request.method.lower())(*args, **kwargs) if self._auto_finish and not self._finished: self.finish() # 关键调用 except Exception, e: self._handle_request_exception(e) def _generate_headers(self): lines = [self.request.version + " " + str(self._status_code) + " " + httplib.responses[self._status_code]] lines.extend(["%s: %s" % (n, v) for n, v in self._headers.iteritems()]) for cookie_dict in getattr(self, "_new_cookies", []): for cookie in cookie_dict.values(): lines.append("Set-Cookie: " + cookie.OutputString(None)) return "\r\n".join(lines) + "\r\n\r\n" # 打印出错日志 def _log(self): if self._status_code < 400: log_method = logging.info elif self._status_code < 500: log_method = logging.warning else: log_method = logging.error request_time = 1000.0 * self.request.request_time() # 日志打印 log_method("%d %s %.2fms", self._status_code, self._request_summary(), request_time) def _request_summary(self): return self.request.method + " " + self.request.uri + " (" + \ self.request.remote_ip + ")" def _handle_request_exception(self, e): if isinstance(e, HTTPError): if e.log_message: format = "%d %s: " + e.log_message args = [e.status_code, self._request_summary()] + list(e.args) 
logging.warning(format, *args) if e.status_code not in httplib.responses: logging.error("Bad HTTP status code: %d", e.status_code) self.send_error(500, exception=e) else: self.send_error(e.status_code, exception=e) else: logging.error("Uncaught exception %s\n%r", self._request_summary(), self.request, exc_info=e) self.send_error(500, exception=e) def _ui_module(self, name, module): def render(*args, **kwargs): if not hasattr(self, "_active_modules"): self._active_modules = {} if name not in self._active_modules: self._active_modules[name] = module(self) rendered = self._active_modules[name].render(*args, **kwargs) return rendered return render def _ui_method(self, method): return lambda *args, **kwargs: method(self, *args, **kwargs) # 装饰器定义: 异步处理 def asynchronous(method): """Wrap request handler methods with this if they are asynchronous. If this decorator is given, the response is not finished when the method returns. It is up to the request handler to call self.finish() to finish the HTTP request. Without this decorator, the request is automatically finished when the get() or post() method returns. class MyRequestHandler(web.RequestHandler): @web.asynchronous def get(self): http = httpclient.AsyncHTTPClient() http.fetch("http://friendfeed.com/", self._on_download) def _on_download(self, response): self.write("Downloaded!") self.finish() """ @functools.wraps(method) def wrapper(self, *args, **kwargs): if self.application._wsgi: raise Exception("@asynchronous is not supported for WSGI apps") self._auto_finish = False return method(self, *args, **kwargs) return wrapper # 装饰器定义: 去 斜杠(/) def removeslash(method): """Use this decorator to remove trailing slashes from the request path. For example, a request to '/foo/' would redirect to '/foo' with this decorator. Your request handler mapping should use a regular expression like r'/foo/*' in conjunction with using the decorator. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): if self.request.path.endswith("/"): # 结尾含 / if self.request.method == "GET": uri = self.request.path.rstrip("/") # 过滤掉 / if self.request.query: uri += "?" + self.request.query self.redirect(uri) # 重定向 return raise HTTPError(404) return method(self, *args, **kwargs) return wrapper # 装饰器定义: 添加 斜杠(/) def addslash(method): """Use this decorator to add a missing trailing slash to the request path. For example, a request to '/foo' would redirect to '/foo/' with this decorator. Your request handler mapping should use a regular expression like r'/foo/?' in conjunction with using the decorator. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): if not self.request.path.endswith("/"): if self.request.method == "GET": uri = self.request.path + "/" if self.request.query: uri += "?" + self.request.query self.redirect(uri) # 重定向 return raise HTTPError(404) return method(self, *args, **kwargs) return wrapper # ---------------------------------------------------------------- # 入口: # # # ---------------------------------------------------------------- class Application(object): """A collection of request handlers that make up a web application. Instances of this class are callable and can be passed directly to HTTPServer to serve the application: application = web.Application([ (r"/", MainPageHandler), ]) http_server = httpserver.HTTPServer(application) http_server.listen(8080) ioloop.IOLoop.instance().start() The constructor for this class takes in a list of URLSpec objects or (regexp, request_class) tuples. 
When we receive requests, we iterate over the list in order and instantiate an instance of the first request class whose regexp matches the request path. Each tuple can contain an optional third element, which should be a dictionary if it is present. That dictionary is passed as keyword arguments to the contructor of the handler. This pattern is used for the StaticFileHandler below: application = web.Application([ (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}), ]) We support virtual hosts with the add_handlers method, which takes in a host regular expression as the first argument: application.add_handlers(r"www\.myhost\.com", [ (r"/article/([0-9]+)", ArticleHandler), ]) You can serve static files by sending the static_path setting as a keyword argument. We will serve those files from the /static/ URI (this is configurable with the static_url_prefix setting), and we will serve /favicon.ico and /robots.txt from the same directory. """ def __init__(self, handlers=None, default_host="", transforms=None, wsgi=False, **settings): """ :param handlers: :param default_host: :param transforms: :param wsgi: :param settings: - gzip : 压缩 - static_path : 静态资源路径 - debug : 调试开关 :return: """ if transforms is None: self.transforms = [] if settings.get("gzip"): # 配置选项 self.transforms.append(GZipContentEncoding) self.transforms.append(ChunkedTransferEncoding) else: self.transforms = transforms self.handlers = [] self.named_handlers = {} self.default_host = default_host self.settings = settings # 自定义配置项 self.ui_modules = {} self.ui_methods = {} self._wsgi = wsgi self._load_ui_modules(settings.get("ui_modules", {})) self._load_ui_methods(settings.get("ui_methods", {})) if self.settings.get("static_path"): # 配置项中含: 静态资源路径 path = self.settings["static_path"] handlers = list(handlers or []) static_url_prefix = settings.get("static_url_prefix", "/static/") handlers = [ (re.escape(static_url_prefix) + r"(.*)", StaticFileHandler, dict(path=path)), (r"/(favicon\.ico)", StaticFileHandler, dict(path=path)), (r"/(robots\.txt)", StaticFileHandler, dict(path=path)), ] + handlers if handlers: self.add_handlers(".*$", handlers) # 关键调用 # Automatically reload modified modules if self.settings.get("debug") and not wsgi: # 调试模式时, 自动监测,并重启项目 import autoreload # tornado 自定义模块 autoreload.start() def add_handlers(self, host_pattern, host_handlers): """Appends the given handlers to our handler list.""" if not host_pattern.endswith("$"): host_pattern += "$" handlers = [] # The handlers with the wildcard host_pattern are a special # case - they're added in the constructor but should have lower # precedence than the more-precise handlers added later. # If a wildcard handler group exists, it should always be last # in the list, so insert new groups just before it. 
if self.handlers and self.handlers[-1][0].pattern == '.*$': self.handlers.insert(-1, (re.compile(host_pattern), handlers)) # 正则匹配 else: self.handlers.append((re.compile(host_pattern), handlers)) # 正则匹配 for spec in host_handlers: if type(spec) is type(()): # 元组 assert len(spec) in (2, 3) pattern = spec[0] handler = spec[1] if len(spec) == 3: kwargs = spec[2] else: kwargs = {} spec = URLSpec(pattern, handler, kwargs) # 关键调用 handlers.append(spec) if spec.name: if spec.name in self.named_handlers: logging.warning( "Multiple handlers named %s; replacing previous value", spec.name) self.named_handlers[spec.name] = spec def add_transform(self, transform_class): """Adds the given OutputTransform to our transform list.""" self.transforms.append(transform_class) def _get_host_handlers(self, request): host = request.host.lower().split(':')[0] for pattern, handlers in self.handlers: if pattern.match(host): return handlers # Look for default host if not behind load balancer (for debugging) if "X-Real-Ip" not in request.headers: for pattern, handlers in self.handlers: if pattern.match(self.default_host): return handlers return None def _load_ui_methods(self, methods): if type(methods) is types.ModuleType: self._load_ui_methods(dict((n, getattr(methods, n)) for n in dir(methods))) elif isinstance(methods, list): for m in methods: self._load_ui_methods(m) else: for name, fn in methods.iteritems(): if not name.startswith("_") and hasattr(fn, "__call__") \ and name[0].lower() == name[0]: self.ui_methods[name] = fn def _load_ui_modules(self, modules): if type(modules) is types.ModuleType: self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules))) elif isinstance(modules, list): for m in modules: self._load_ui_modules(m) else: assert isinstance(modules, dict) for name, cls in modules.iteritems(): try: if issubclass(cls, UIModule): self.ui_modules[name] = cls except TypeError: pass # 关键定义: 类对象 --> 可调用对象 # # 注意: 被调用时机 # - wsgi.py # - WSGIApplication() # - self.__call__() 方法 # def __call__(self, request): """Called by HTTPServer to execute the request.""" transforms = [t(request) for t in self.transforms] handler = None args = [] kwargs = {} handlers = self._get_host_handlers(request) if not handlers: handler = RedirectHandler( request, "http://" + self.default_host + "/") else: for spec in handlers: match = spec.regex.match(request.path) if match: # None-safe wrapper around urllib.unquote to handle # unmatched optional groups correctly def unquote(s): if s is None: return s return urllib.unquote(s) handler = spec.handler_class(self, request, **spec.kwargs) # Pass matched groups to the handler. Since # match.groups() includes both named and unnamed groups, # we want to use either groups or groupdict but not both. 
kwargs = dict((k, unquote(v)) for (k, v) in match.groupdict().iteritems()) if kwargs: args = [] else: args = [unquote(s) for s in match.groups()] break if not handler: handler = ErrorHandler(self, request, 404) # In debug mode, re-compile templates and reload static files on every # request so you don't need to restart to see changes if self.settings.get("debug"): if getattr(RequestHandler, "_templates", None): map(lambda loader: loader.reset(), RequestHandler._templates.values()) RequestHandler._static_hashes = {} # 关键代码调用时机: handler._execute(transforms, *args, **kwargs) return handler def reverse_url(self, name, *args): """Returns a URL path for handler named `name` The handler must be added to the application as a named URLSpec """ if name in self.named_handlers: return self.named_handlers[name].reverse(*args) raise KeyError("%s not found in named urls" % name) # ---------------------------------------------------- # 异常基类 # ---------------------------------------------------- class HTTPError(Exception): """An exception that will turn into an HTTP error response.""" def __init__(self, status_code, log_message=None, *args): self.status_code = status_code self.log_message = log_message self.args = args def __str__(self): message = "HTTP %d: %s" % ( self.status_code, httplib.responses[self.status_code]) if self.log_message: return message + " (" + (self.log_message % self.args) + ")" else: return message # ---------------------------------------------------- # 扩展子类: 出错处理 # ---------------------------------------------------- class ErrorHandler(RequestHandler): """Generates an error response with status_code for all requests.""" def __init__(self, application, request, status_code): RequestHandler.__init__(self, application, request) self.set_status(status_code) def prepare(self): raise HTTPError(self._status_code) # ---------------------------------------------------- # 扩展子类: 重定向处理 # ---------------------------------------------------- class RedirectHandler(RequestHandler): """Redirects the client to the given URL for all GET requests. You should provide the keyword argument "url" to the handler, e.g.: application = web.Application([ (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}), ]) """ def __init__(self, application, request, url, permanent=True): RequestHandler.__init__(self, application, request) self._url = url self._permanent = permanent # GET 请求,变成 重定向调用 def get(self): self.redirect(self._url, permanent=self._permanent) # ---------------------------------------------------- # 扩展子类: 静态资源处理 # 说明: # - 覆写 get(), head() 方法 # ---------------------------------------------------- class StaticFileHandler(RequestHandler): """A simple handler that can serve static content from a directory. To map a path to this handler for a static data directory /var/www, you would add a line to your application like: application = web.Application([ (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}), ]) The local root directory of the content should be passed as the "path" argument to the handler. To support aggressive browser caching, if the argument "v" is given with the path, we set an infinite HTTP expiration header. So, if you want browsers to cache a file indefinitely, send them to, e.g., /static/images/myimage.png?v=xxx. 
""" def __init__(self, application, request, path): RequestHandler.__init__(self, application, request) self.root = os.path.abspath(path) + os.path.sep def head(self, path): self.get(path, include_body=False) def get(self, path, include_body=True): abspath = os.path.abspath(os.path.join(self.root, path)) if not abspath.startswith(self.root): raise HTTPError(403, "%s is not in root static directory", path) if not os.path.exists(abspath): raise HTTPError(404) if not os.path.isfile(abspath): raise HTTPError(403, "%s is not a file", path) stat_result = os.stat(abspath) modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME]) self.set_header("Last-Modified", modified) if "v" in self.request.arguments: self.set_header("Expires", datetime.datetime.utcnow() + \ datetime.timedelta(days=365*10)) self.set_header("Cache-Control", "max-age=" + str(86400*365*10)) else: self.set_header("Cache-Control", "public") mime_type, encoding = mimetypes.guess_type(abspath) if mime_type: self.set_header("Content-Type", mime_type) self.set_extra_headers(path) # Check the If-Modified-Since, and don't send the result if the # content has not been modified ims_value = self.request.headers.get("If-Modified-Since") if ims_value is not None: date_tuple = email.utils.parsedate(ims_value) if_since = datetime.datetime.fromtimestamp(time.mktime(date_tuple)) if if_since >= modified: self.set_status(304) return if not include_body: return self.set_header("Content-Length", stat_result[stat.ST_SIZE]) file = open(abspath, "rb") # 读文件 try: self.write(file.read()) # 写出 finally: file.close() def set_extra_headers(self, path): """For subclass to add extra headers to the response""" pass # ---------------------------------------------------- # 扩展子类: 包裹 另外一个 回调 # 说明: # - 覆写 prepare() 预定义接口 # ---------------------------------------------------- class FallbackHandler(RequestHandler): """A RequestHandler that wraps another HTTP server callback. The fallback is a callable object that accepts an HTTPRequest, such as an Application or tornado.wsgi.WSGIContainer. This is most useful to use both tornado RequestHandlers and WSGI in the same server. Typical usage: wsgi_app = tornado.wsgi.WSGIContainer( django.core.handlers.wsgi.WSGIHandler()) application = tornado.web.Application([ (r"/foo", FooHandler), (r".*", FallbackHandler, dict(fallback=wsgi_app), ]) """ def __init__(self, app, request, fallback): RequestHandler.__init__(self, app, request) self.fallback = fallback # 覆写接口 def prepare(self): self.fallback(self.request) self._finished = True # ---------------------------------------------------- # 自定义基类: 输出转换 # 说明: # - 2个子类 # - GZipContentEncoding() # - ChunkedTransferEncoding() # ---------------------------------------------------- class OutputTransform(object): """A transform modifies the result of an HTTP request (e.g., GZip encoding) A new transform instance is created for every request. See the ChunkedTransferEncoding example below if you want to implement a new Transform. """ def __init__(self, request): pass def transform_first_chunk(self, headers, chunk, finishing): return headers, chunk def transform_chunk(self, chunk, finishing): return chunk class GZipContentEncoding(OutputTransform): """Applies the gzip content encoding to the response. 
    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    """
    CONTENT_TYPES = set([
        "text/plain", "text/html", "text/css", "text/xml",
        "application/x-javascript", "application/xml", "application/atom+xml",
        "text/javascript", "application/json", "application/xhtml+xml"])
    MIN_LENGTH = 5

    def __init__(self, request):
        self._gzipping = request.supports_http_1_1() and \
            "gzip" in request.headers.get("Accept-Encoding", "")

    def transform_first_chunk(self, headers, chunk, finishing):
        if self._gzipping:
            ctype = headers.get("Content-Type", "").split(";")[0]
            self._gzipping = (ctype in self.CONTENT_TYPES) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                (finishing or "Content-Length" not in headers) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = cStringIO.StringIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            self._gzip_pos = 0
            chunk = self.transform_chunk(chunk, finishing)  # key call
            if "Content-Length" in headers:
                headers["Content-Length"] = str(len(chunk))
        return headers, chunk

    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            chunk = self._gzip_value.getvalue()
            if self._gzip_pos > 0:
                chunk = chunk[self._gzip_pos:]
            self._gzip_pos += len(chunk)
        return chunk


class ChunkedTransferEncoding(OutputTransform):
    """Applies the chunked transfer encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
    """
    def __init__(self, request):
        self._chunking = request.supports_http_1_1()

    def transform_first_chunk(self, headers, chunk, finishing):
        if self._chunking:
            # No need to chunk the output if a Content-Length is specified
            if "Content-Length" in headers or "Transfer-Encoding" in headers:
                self._chunking = False
            else:
                headers["Transfer-Encoding"] = "chunked"
                chunk = self.transform_chunk(chunk, finishing)
        return headers, chunk

    def transform_chunk(self, block, finishing):
        if self._chunking:
            # Don't write out empty chunks because that means END-OF-STREAM
            # with chunked encoding
            if block:
                block = ("%x" % len(block)) + "\r\n" + block + "\r\n"
            if finishing:
                block += "0\r\n\r\n"
        return block


# ----------------------------------------------------
# Decorator: authentication check
# Logic:
#   - If the current user is logged in, call the wrapped method normally.
#   - If the current user is not logged in:
#       - For GET requests:
#           - fetch the URL of the login page,
#           - record the page being visited in a "next" query parameter,
#           - redirect to the login page
#             (after a successful login, the "next" parameter is used to jump
#              back to the page visited before logging in).
#       - For any other request, raise a 403 error.
# Note:
#   - A typical implementation of access control, worth studying.
#   - The code is concise and not complicated.
# ----------------------------------------------------
def authenticated(method):
    """Decorate methods with this to require that the user be logged in."""
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if not self.current_user:  # user is not logged in
            if self.request.method == "GET":  # handle GET requests
                url = self.get_login_url()  # URL of the login page
                if "?" not in url:
                    # Key step:
                    #   - append a "next" field to the URL (format: ?next=/xxxx.html)
                    #   - purpose: once the user has logged in successfully,
                    #     send them back to the page they were visiting
                    url += "?" + urllib.urlencode(dict(next=self.request.uri))
                self.redirect(url)  # redirect
                return
            raise HTTPError(403)  # any other method: raise a 403 error
        return method(self, *args, **kwargs)  # user is logged in: call normally
    return wrapper


# ----------------------------------------------------
# Interface class: UI module (handles CSS and JS)
# Notes:
#   - Defines a set of interface methods; subclass it and override
#     them before use.
# ----------------------------------------------------
class UIModule(object):
    """A UI re-usable, modular unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    """
    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.current_user = handler.current_user
        self.locale = handler.locale

    # Interface method: must be overridden before it can be used
    def render(self, *args, **kwargs):
        raise NotImplementedError()

    def embedded_javascript(self):
        """Returns a JavaScript string that will be embedded in the page."""
        return None

    def javascript_files(self):
        """Returns a list of JavaScript files required by this module."""
        return None

    def embedded_css(self):
        """Returns a CSS string that will be embedded in the page."""
        return None

    def css_files(self):
        """Returns a list of CSS files required by this module."""
        return None

    def html_head(self):
        """Returns a CSS string that will be put in the <head/> element"""
        return None

    def html_body(self):
        """Returns an HTML string that will be put in the <body/> element"""
        return None

    def render_string(self, path, **kwargs):
        return self.handler.render_string(path, **kwargs)


# ----------------------------------------------------
# Interface class: URL matching
# Notes:
#   - maps a URL pattern to a handler
# ----------------------------------------------------
class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler_class, kwargs={}, name=None):
        """Creates a URLSpec.

        Parameters:
        pattern: Regular expression to be matched. Any groups in the regex
            will be passed in to the handler's get/post/etc methods as
            arguments.
        handler_class: RequestHandler subclass to be invoked.
        kwargs (optional): A dictionary of additional arguments to be passed
            to the handler's constructor.
        name (optional): A name for this handler. Used by
            Application.reverse_url.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)  # regex matching
        self.handler_class = handler_class
        self.kwargs = kwargs
        self.name = name
        self._path, self._group_count = self._find_groups()

    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]

        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)

        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                paren_loc = fragment.index(')')
                if paren_loc >= 0:
                    pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)

    def reverse(self, *args):
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        return self._path % tuple([str(a) for a in args])

url = URLSpec


# ----------------------------------------------------
# UTF-8 encoding helper: encoding check
# Logic:
#   - if s is a unicode string, encode it as UTF-8 and return the result
#   - if s is not a string type at all, fail the assertion
#   - if s is already a byte (ASCII) string, return it unchanged
# ----------------------------------------------------
def _utf8(s):
    if isinstance(s, unicode):
        return s.encode("utf-8")
    assert isinstance(s, str)
    return s


# ----------------------------------------------------
# Unicode decoding helper: encoding check
# Logic:
#   - essentially the mirror image of _utf8()
# ----------------------------------------------------
def _unicode(s):
    if isinstance(s, str):
        try:
            return s.decode("utf-8")
        except UnicodeDecodeError:
            raise HTTPError(400, "Non-utf8 argument")
    assert isinstance(s, unicode)
    return s


def _time_independent_equals(a, b):
    if len(a) != len(b):
        return False
    result = 0
    for x, y in zip(a, b):
        result |= ord(x) ^ ord(y)
    return result == 0


class _O(dict):
    """Makes a dictionary behave like an object."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
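

# ----------------------------------------------------
# Illustrative usage sketch, not part of the original tornado 1.0 source.
# It shows how RequestHandler, authenticated() and Application fit together;
# the handler class, cookie field and settings values are made-up examples,
# and get_secure_cookie()/get_login_url() are assumed from earlier in this
# file.
# ----------------------------------------------------
#
# class MainHandler(RequestHandler):
#     def get_current_user(self):
#         return self.get_secure_cookie("user")
#
#     @authenticated
#     def get(self):
#         self.write("Hello, " + self.current_user)
#
# application = Application(
#     [(r"/", MainHandler)],
#     cookie_secret="...",   # required for secure cookies
#     login_url="/login",    # where authenticated() redirects anonymous users
# )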
hhstore/tornado-annotated
src/tornado-1.0.0/tornado/web.py
Python
mit
66,814
""" This script exposes a class used to read the Shapefile Index format used in conjunction with a shapefile. The Index file gives the record number and content length for every record stored in the main shapefile. This is useful if you need to extract specific features from a shapefile without reading the entire file. How to use: from ShapefileIndexReader import ShapefileIndex shx = ShapefileIndex(Path/To/index.shx) shx.read() The 'shx' object will expose three properties 1) Path - the path given to the shapefile, if it exists 2) Offsets - an array of byte offsets for each record in the main shapefile 3) Lengths - an array of 16-bit word lengths for each record in the main shapefile """ import os __author__ = 'Sean Taylor Hutchison' __license__ = 'MIT' __version__ = '0.1.0' __maintainer__ = 'Sean Taylor Hutchison' __email__ = '[email protected]' __status__ = 'Development' class ShapefileIndex: Records = [] def __bytes_to_index_records(self,file_bytes): file_length = len(file_bytes) num_records = int((file_length - 100) / 8) for record_counter in range(0,num_records): byte_position = 100 + (record_counter * 8) offset = int.from_bytes(file_bytes[byte_position:byte_position+4], byteorder='big') length = int.from_bytes(file_bytes[byte_position+4:byte_position+8], byteorder='big') self.Records.append([offset,length]) def read(self): with open(self.Path, 'rb') as shpindex: self.__bytes_to_index_records(shpindex.read()) def __init__(self, path=None): if path and os.path.exists(path) and os.path.splitext(path)[1] == '.shx': self.Path = path else: raise FileNotFoundError
taylorhutchison/ShapefileReaderPy
ShapefileIndexReader.py
Python
mit
1,778
# coding: utf-8 from app.api_client.error import HTTPError from app.helpers.login_helpers import generate_buyer_creation_token from dmapiclient.audit import AuditTypes from dmutils.email import generate_token, EmailError from dmutils.forms import FakeCsrf from ...helpers import BaseApplicationTest from lxml import html import mock import pytest from flask import session import flask_featureflags as feature EMAIL_SENT_MESSAGE = "send a link" USER_CREATION_EMAIL_ERROR = "Failed to send user creation email." PASSWORD_RESET_EMAIL_ERROR = "Failed to send password reset." TOKEN_CREATED_BEFORE_PASSWORD_LAST_CHANGED_ERROR = "This password reset link is invalid." USER_LINK_EXPIRED_ERROR = "The link you used to create an account may have expired." def has_validation_errors(data, field_name): document = html.fromstring(data) form_field = document.xpath('//input[@name="{}"]'.format(field_name)) return 'invalid' in form_field[0].classes or 'invalid' in form_field[0].getparent().classes class TestLogin(BaseApplicationTest): def setup(self): super(TestLogin, self).setup() data_api_client_config = {'authenticate_user.return_value': self.user( 123, "[email protected]", 1234, 'name', 'name' )} self._data_api_client = mock.patch( 'app.main.views.login.data_api_client', **data_api_client_config ) self.data_api_client_mock = self._data_api_client.start() def teardown(self): self._data_api_client.stop() def test_should_show_login_page(self): res = self.client.get(self.expand_path('/login')) assert res.status_code == 200 assert 'private' in res.headers['Cache-Control'] assert "Sign in to the Marketplace" in res.get_data(as_text=True) @mock.patch('app.main.views.login.data_api_client') def test_redirect_on_buyer_login(self, data_api_client): with self.app.app_context(): data_api_client.authenticate_user.return_value = self.user(123, "[email protected]", None, None, 'Name') data_api_client.get_user.return_value = self.user(123, "[email protected]", None, None, 'Name') res = self.client.post(self.url_for('main.process_login'), data={ 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) assert res.status_code == 302 assert res.location == 'http://localhost/2/buyer-dashboard' assert 'Secure;' in res.headers['Set-Cookie'] @mock.patch('app.main.views.login.data_api_client') def test_redirect_on_supplier_login(self, data_api_client): with self.app.app_context(): data_api_client.authenticate_user.return_value = self.user( 123, '[email protected]', None, None, 'Name', role='supplier' ) data_api_client.get_user.return_value = self.user( 123, '[email protected]', None, None, 'Name', role='supplier' ) res = self.client.post(self.url_for('main.process_login'), data={ 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) assert res.status_code == 302 assert res.location == 'http://localhost' + \ self.expand_path('/2/seller-dashboard') assert 'Secure;' in res.headers['Set-Cookie'] def test_should_redirect_logged_in_buyer(self): self.login_as_buyer() res = self.client.get(self.url_for('main.render_login')) assert res.status_code == 302 assert res.location == 'http://localhost/2/buyer-dashboard' def test_should_strip_whitespace_surrounding_login_email_address_field(self): self.client.post(self.expand_path('/login'), data={ 'email_address': ' [email protected] ', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) self.data_api_client_mock.authenticate_user.assert_called_with('[email protected]', '1234567890') def 
test_should_not_strip_whitespace_surrounding_login_password_field(self): self.client.post(self.expand_path('/login'), data={ 'email_address': '[email protected]', 'password': ' 1234567890 ', 'csrf_token': FakeCsrf.valid_token, }) self.data_api_client_mock.authenticate_user.assert_called_with( '[email protected]', ' 1234567890 ') @mock.patch('app.main.views.login.data_api_client') def test_ok_next_url_redirects_buyer_on_login(self, data_api_client): with self.app.app_context(): data_api_client.authenticate_user.return_value = self.user(123, "[email protected]", None, None, 'Name') data_api_client.get_user.return_value = self.user(123, "[email protected]", None, None, 'Name') data = { 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, } res = self.client.post(self.expand_path('/login?next={}'.format(self.expand_path('/bar-foo'))), data=data) assert res.status_code == 302 assert res.location == 'http://localhost' + self.expand_path('/bar-foo') @mock.patch('app.main.views.login.data_api_client') def test_bad_next_url_redirects_user(self, data_api_client): with self.app.app_context(): data_api_client.authenticate_user.return_value = self.user(123, "[email protected]", None, None, 'Name') data_api_client.get_user.return_value = self.user(123, "[email protected]", None, None, 'Name') data = { 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, } res = self.client.post(self.expand_path('/login?next=http://badness.com'), data=data) assert res.status_code == 302 assert res.location == 'http://localhost/2/buyer-dashboard' def test_should_have_cookie_on_redirect(self): with self.app.app_context(): self.app.config['SESSION_COOKIE_DOMAIN'] = '127.0.0.1' self.app.config['SESSION_COOKIE_SECURE'] = True res = self.client.post(self.expand_path('/login'), data={ 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) cookie_value = self.get_cookie_by_name(res, 'dm_session') assert cookie_value['dm_session'] is not None assert cookie_value["Domain"] == "127.0.0.1" def test_should_redirect_to_login_on_logout(self): res = self.client.get(self.expand_path('/logout')) assert res.status_code == 302 assert res.location == 'http://localhost/2/login' @mock.patch('app.main.views.login.data_api_client') def test_should_return_a_403_for_invalid_login(self, data_api_client): data_api_client.authenticate_user.return_value = None data_api_client.get_user.return_value = None res = self.client.post(self.expand_path('/login'), data={ 'email_address': '[email protected]', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) assert self.strip_all_whitespace("Make sure you've entered the right email address and password") \ in self.strip_all_whitespace(res.get_data(as_text=True)) assert res.status_code == 403 def test_should_be_validation_error_if_no_email_or_password(self): res = self.client.post(self.expand_path('/login'), data={'csrf_token': FakeCsrf.valid_token}) data = res.get_data(as_text=True) assert res.status_code == 400 assert has_validation_errors(data, 'email_address') assert has_validation_errors(data, 'password') def test_should_be_validation_error_if_invalid_email(self): res = self.client.post(self.expand_path('/login'), data={ 'email_address': 'invalid', 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) data = res.get_data(as_text=True) assert res.status_code == 400 assert has_validation_errors(data, 'email_address') assert not 
has_validation_errors(data, 'password') def test_valid_email_formats(self): cases = [ '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', ] for address in cases: res = self.client.post(self.expand_path('/login'), data={ 'email_address': address, 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) data = res.get_data(as_text=True) assert res.status_code == 302, address def test_invalid_email_formats(self): cases = [ '', 'bad', 'bad@@example.com', 'bad @example.com', '[email protected]', 'bad.example.com', '@', '@example.com', 'bad@', '[email protected],[email protected]', '[email protected] [email protected]', '[email protected],other.example.com', ] for address in cases: res = self.client.post(self.expand_path('/login'), data={ 'email_address': address, 'password': '1234567890', 'csrf_token': FakeCsrf.valid_token, }) data = res.get_data(as_text=True) assert res.status_code == 400, address assert has_validation_errors(data, 'email_address'), address class TestLoginFormsNotAutofillable(BaseApplicationTest): def _forms_and_inputs_not_autofillable(self, url, expected_title): response = self.client.get(url) assert response.status_code == 200 document = html.fromstring(response.get_data(as_text=True)) page_title = document.xpath('//h1/text()')[0].strip() assert expected_title == page_title forms = document.xpath('//main[@id="content"]//form') for form in forms: assert form.get('autocomplete') == "off" non_hidden_inputs = form.xpath('//input[@type!="hidden"]') for input in non_hidden_inputs: if input.get('type') != 'submit': assert input.get('autocomplete') == "off" def test_login_form_and_inputs_not_autofillable(self): self._forms_and_inputs_not_autofillable( self.expand_path('/login'), "Sign in to the Marketplace" ) @pytest.mark.skip def test_request_password_reset_form_and_inputs_not_autofillable(self): self._forms_and_inputs_not_autofillable( self.expand_path('/reset-password'), "Reset password" ) @pytest.mark.skip @mock.patch('app.main.views.login.data_api_client') def test_reset_password_form_and_inputs_not_autofillable( self, data_api_client ): data_api_client.get_user.return_value = self.user( 123, "[email protected]", 1234, 'email', 'name' ) with self.app.app_context(): token = generate_token( { "user": 123, "email": '[email protected]', }, self.app.config['SECRET_KEY'], self.app.config['RESET_PASSWORD_SALT']) url = self.expand_path('/reset-password/{}').format(token) self._forms_and_inputs_not_autofillable( url, "Reset password", ) class TestTermsUpdate(BaseApplicationTest): payload = { 'csrf_token': FakeCsrf.valid_token, 'accept_terms': 'y', } def test_page_load(self): with self.app.app_context(): self.login_as_buyer() res = self.client.get(self.url_for('main.terms_updated')) assert res.status_code == 200 assert 'terms' in res.get_data(as_text=True) def test_login_required(self): with self.app.app_context(): # Not logged in res = self.client.get(self.url_for('main.terms_updated')) assert res.status_code == 302 @mock.patch('app.main.views.login.terms_of_use') @mock.patch('app.main.views.login.data_api_client') def test_submit(self, data_api_client, terms_of_use): with self.app.app_context(): self.login_as_buyer(user_id=42) res = self.client.post(self.url_for('main.accept_updated_terms'), data=self.payload) data_api_client.update_user.assert_called_once_with(42, fields=mock.ANY) terms_of_use.set_session_flag.assert_called_once_with(False) assert res.status_code == 302 @mock.patch('app.main.views.login.data_api_client') def 
test_submit_requires_login(self, data_api_client): with self.app.app_context(): # Not logged in res = self.client.post(self.url_for('main.accept_updated_terms'), data=self.payload) data_api_client.update_user.assert_not_called() assert res.status_code == 302 assert res.location.startswith(self.url_for('main.render_login', _external=True)) @mock.patch('app.main.views.login.data_api_client') def test_submit_without_accepting(self, data_api_client): with self.app.app_context(): self.login_as_buyer() data = dict(self.payload) data.pop('accept_terms') res = self.client.post(self.url_for('main.accept_updated_terms'), data=data) data_api_client.update_user.assert_not_called() assert res.status_code == 400
AusDTO/dto-digitalmarketplace-buyer-frontend
tests/app/views/test_login.py
Python
mit
14,240
#! /usr/bin/env python3

import asyncio
import subprocess
import numpy as np
import time

comm = None


class Camera:
    def __init__(self, notify):
        self._process = None
        self._now_pos = np.array([0., 0., 0.])
        self._running = False
        self._notify = notify

    @asyncio.coroutine
    def connect(self):
        self._process = yield from asyncio.create_subprocess_exec(
            'python2', 'camera.py',
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE
        )
        self._running = True

    @asyncio.coroutine
    def run(self):
        while self._running:
            data = yield from self._process.stdout.readline()
            print(data)
            self._now_pos = np.array(list(map(float, data.split())))
            yield from self._notify(time.time(), self._now_pos)

    def stop(self):
        self._running = False
        self._process.terminate()
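

# Illustrative usage sketch, not part of the original module. It drives a
# Camera from an event loop using the same pre-3.5 asyncio style as the class
# above; the notify coroutine and run_camera() wiring are hypothetical.
@asyncio.coroutine
def print_position(timestamp, position):
    # Receives the wall-clock time and the latest position vector.
    print(timestamp, position)


@asyncio.coroutine
def run_camera():
    cam = Camera(print_position)
    yield from cam.connect()
    try:
        yield from cam.run()
    finally:
        cam.stop()


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(run_camera())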
AlphaLambdaMuPi/CamDrone
camera3.py
Python
mit
937
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from abc import ABCMeta, abstractmethod

from django.core.files import File
from six import with_metaclass

from django.utils.module_loading import import_string

from rest_framework_tus import signals

from .settings import TUS_SAVE_HANDLER_CLASS


class AbstractUploadSaveHandler(with_metaclass(ABCMeta, object)):
    def __init__(self, upload):
        self.upload = upload

    @abstractmethod
    def handle_save(self):
        pass

    def run(self):
        # Trigger state change
        self.upload.start_saving()
        self.upload.save()

        # Initialize saving
        self.handle_save()

    def finish(self):
        # Trigger signal
        signals.saved.send(sender=self.__class__, instance=self)

        # Finish
        self.upload.finish()
        self.upload.save()


class DefaultSaveHandler(AbstractUploadSaveHandler):
    destination_file_field = 'uploaded_file'

    def handle_save(self):
        # Save temporary field to file field
        file_field = getattr(self.upload, self.destination_file_field)
        file_field.save(self.upload.filename,
                        File(open(self.upload.temporary_file_path)))

        # Finish upload
        self.finish()


def get_save_handler(import_path=None):
    return import_string(import_path or TUS_SAVE_HANDLER_CLASS)
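

# Illustrative sketch, not part of the original module. A project-specific
# handler only needs to implement handle_save(); the model field name and the
# dotted settings path below are hypothetical.
class ArchiveSaveHandler(AbstractUploadSaveHandler):
    destination_file_field = 'archived_file'

    def handle_save(self):
        # Mirrors DefaultSaveHandler: copy the temporary upload into the
        # destination file field, then mark the upload as finished.
        file_field = getattr(self.upload, self.destination_file_field)
        file_field.save(self.upload.filename,
                        File(open(self.upload.temporary_file_path)))
        self.finish()

# The active handler class is chosen via TUS_SAVE_HANDLER_CLASS (imported
# from .settings above), e.g.
#   TUS_SAVE_HANDLER_CLASS = 'myapp.handlers.ArchiveSaveHandler'
# and is instantiated through get_save_handler()(upload).run().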
dirkmoors/drf-tus
rest_framework_tus/storage.py
Python
mit
1,346