Dataset schema:

| column | dtype | range / classes |
|---|---|---|
| repo_name | string | lengths 6..61 |
| path | string | lengths 4..230 |
| copies | string | lengths 1..3 |
| size | string | lengths 4..6 |
| text | string | lengths 1.01k..850k |
| license | string | 15 classes |
| hash | int64 | -9,220,477,234,079,998,000 .. 9,219,060,020B |
| line_mean | float64 | 11.6 .. 96.6 |
| line_max | int64 | 32 .. 939 |
| alpha_frac | float64 | 0.26 .. 0.9 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.62 .. 6.1 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |

evaimg/evaimg.github.com-src | fabfile.py | gpl-3.0

from fabric.api import *
import fabric.contrib.project as project
import os
# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path
# Remote server configuration
production = 'root@localhost:22'
dest_path = '/var/www'
def clean():
if os.path.isdir(DEPLOY_PATH):
local('rm -rf {deploy_path}'.format(**env))
local('mkdir {deploy_path}'.format(**env))
def build():
local('pelican -s pelicanconf.py')
def rebuild():
clean()
build()
def regenerate():
local('pelican -r -s pelicanconf.py')
def serve():
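    # NOTE: `SimpleHTTPServer` is the Python 2 module name; on Python 3 the
    # equivalent command would be `python -m http.server`.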
local('cd {deploy_path} && python -m SimpleHTTPServer'.format(**env))
def reserve():
build()
serve()
def preview():
local('pelican -s publishconf.py')
def github():
#if os.path.isdir(DEPLOY_PATH):
# local('ghp-import {deploy_path}'.format(**env))
# local('git push origin gh-pages')
    print('WARNING: GitHub publishing is not yet supported!')
@hosts(production)
def publish():
local('pelican -s publishconf.py')
project.rsync_project(
remote_dir=dest_path,
exclude=".DS_Store",
local_dir=DEPLOY_PATH.rstrip('/') + '/',
delete=True
)

Iconik/eve-suite | src/view/mainwindow/ui_main_window.py | gpl-3.0

# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_main_window.ui'
#
# Created: Thu Mar 17 23:49:49 2011
# by: pyside-uic 0.2.7 running on PySide 1.0.0~rc1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(670, 494)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.widget_3 = QtGui.QWidget(self.centralwidget)
self.widget_3.setObjectName("widget_3")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.widget_3)
self.verticalLayout_3.setMargin(0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.pushButton_3 = QtGui.QPushButton(self.widget_3)
self.pushButton_3.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/icon17_04.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_3.setIcon(icon)
self.pushButton_3.setIconSize(QtCore.QSize(64, 64))
self.pushButton_3.setFlat(False)
self.pushButton_3.setObjectName("pushButton_3")
self.verticalLayout_3.addWidget(self.pushButton_3)
self.label_3 = QtGui.QLabel(self.widget_3)
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setObjectName("label_3")
self.verticalLayout_3.addWidget(self.label_3)
self.gridLayout.addWidget(self.widget_3, 2, 5, 1, 1)
self.widget_4 = QtGui.QWidget(self.centralwidget)
self.widget_4.setObjectName("widget_4")
self.verticalLayout_4 = QtGui.QVBoxLayout(self.widget_4)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.pushButton_4 = QtGui.QPushButton(self.widget_4)
self.pushButton_4.setText("")
self.pushButton_4.setObjectName("pushButton_4")
self.verticalLayout_4.addWidget(self.pushButton_4)
self.label_4 = QtGui.QLabel(self.widget_4)
self.label_4.setObjectName("label_4")
self.verticalLayout_4.addWidget(self.label_4)
self.gridLayout.addWidget(self.widget_4, 2, 6, 1, 1)
self.widget = QtGui.QWidget(self.centralwidget)
self.widget.setObjectName("widget")
self.verticalLayout = QtGui.QVBoxLayout(self.widget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName("verticalLayout")
self.pushButton = QtGui.QPushButton(self.widget)
self.pushButton.setText("")
self.pushButton.setAutoDefault(False)
self.pushButton.setDefault(False)
self.pushButton.setFlat(False)
self.pushButton.setObjectName("pushButton")
self.verticalLayout.addWidget(self.pushButton)
self.label = QtGui.QLabel(self.widget)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.gridLayout.addWidget(self.widget, 2, 3, 1, 1)
self.widget_2 = QtGui.QWidget(self.centralwidget)
self.widget_2.setObjectName("widget_2")
self.verticalLayout_2 = QtGui.QVBoxLayout(self.widget_2)
self.verticalLayout_2.setMargin(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.pushButton_2 = QtGui.QPushButton(self.widget_2)
self.pushButton_2.setText("")
self.pushButton_2.setIconSize(QtCore.QSize(64, 64))
self.pushButton_2.setObjectName("pushButton_2")
self.verticalLayout_2.addWidget(self.pushButton_2)
self.label_2 = QtGui.QLabel(self.widget_2)
self.label_2.setObjectName("label_2")
self.verticalLayout_2.addWidget(self.label_2)
self.gridLayout.addWidget(self.widget_2, 2, 4, 1, 1)
self.widget_5 = QtGui.QWidget(self.centralwidget)
self.widget_5.setObjectName("widget_5")
self.verticalLayout_5 = QtGui.QVBoxLayout(self.widget_5)
self.verticalLayout_5.setMargin(0)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.manufacturing_calculator_button = QtGui.QPushButton(self.widget_5)
self.manufacturing_calculator_button.setText("")
self.manufacturing_calculator_button.setObjectName("manufacturing_calculator_button")
self.verticalLayout_5.addWidget(self.manufacturing_calculator_button)
self.label_5 = QtGui.QLabel(self.widget_5)
self.label_5.setObjectName("label_5")
self.verticalLayout_5.addWidget(self.label_5)
self.gridLayout.addWidget(self.widget_5, 3, 3, 1, 1)
self.widget_6 = QtGui.QWidget(self.centralwidget)
self.widget_6.setObjectName("widget_6")
self.verticalLayout_9 = QtGui.QVBoxLayout(self.widget_6)
self.verticalLayout_9.setMargin(0)
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.pushButton_9 = QtGui.QPushButton(self.widget_6)
self.pushButton_9.setText("")
self.pushButton_9.setObjectName("pushButton_9")
self.verticalLayout_9.addWidget(self.pushButton_9)
self.label_9 = QtGui.QLabel(self.widget_6)
self.label_9.setObjectName("label_9")
self.verticalLayout_9.addWidget(self.label_9)
self.gridLayout.addWidget(self.widget_6, 4, 3, 1, 1)
self.widget_8 = QtGui.QWidget(self.centralwidget)
self.widget_8.setObjectName("widget_8")
self.verticalLayout_6 = QtGui.QVBoxLayout(self.widget_8)
self.verticalLayout_6.setMargin(0)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.pushButton_6 = QtGui.QPushButton(self.widget_8)
self.pushButton_6.setText("")
self.pushButton_6.setObjectName("pushButton_6")
self.verticalLayout_6.addWidget(self.pushButton_6)
self.label_6 = QtGui.QLabel(self.widget_8)
self.label_6.setObjectName("label_6")
self.verticalLayout_6.addWidget(self.label_6)
self.gridLayout.addWidget(self.widget_8, 3, 4, 1, 1)
self.widget_9 = QtGui.QWidget(self.centralwidget)
self.widget_9.setObjectName("widget_9")
self.verticalLayout_7 = QtGui.QVBoxLayout(self.widget_9)
self.verticalLayout_7.setMargin(0)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.pushButton_7 = QtGui.QPushButton(self.widget_9)
self.pushButton_7.setText("")
self.pushButton_7.setObjectName("pushButton_7")
self.verticalLayout_7.addWidget(self.pushButton_7)
self.label_7 = QtGui.QLabel(self.widget_9)
self.label_7.setObjectName("label_7")
self.verticalLayout_7.addWidget(self.label_7)
self.gridLayout.addWidget(self.widget_9, 3, 5, 1, 1)
self.widget_10 = QtGui.QWidget(self.centralwidget)
self.widget_10.setObjectName("widget_10")
self.verticalLayout_8 = QtGui.QVBoxLayout(self.widget_10)
self.verticalLayout_8.setMargin(0)
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.research_calculator_button = QtGui.QPushButton(self.widget_10)
self.research_calculator_button.setText("")
self.research_calculator_button.setObjectName("research_calculator_button")
self.verticalLayout_8.addWidget(self.research_calculator_button)
self.label_8 = QtGui.QLabel(self.widget_10)
self.label_8.setObjectName("label_8")
self.verticalLayout_8.addWidget(self.label_8)
self.gridLayout.addWidget(self.widget_10, 3, 6, 1, 1)
self.widget_11 = QtGui.QWidget(self.centralwidget)
self.widget_11.setObjectName("widget_11")
self.verticalLayout_10 = QtGui.QVBoxLayout(self.widget_11)
self.verticalLayout_10.setMargin(0)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.pushButton_10 = QtGui.QPushButton(self.widget_11)
self.pushButton_10.setText("")
self.pushButton_10.setObjectName("pushButton_10")
self.verticalLayout_10.addWidget(self.pushButton_10)
self.label_10 = QtGui.QLabel(self.widget_11)
self.label_10.setObjectName("label_10")
self.verticalLayout_10.addWidget(self.label_10)
self.gridLayout.addWidget(self.widget_11, 4, 4, 1, 1)
self.widget_12 = QtGui.QWidget(self.centralwidget)
self.widget_12.setObjectName("widget_12")
self.verticalLayout_11 = QtGui.QVBoxLayout(self.widget_12)
self.verticalLayout_11.setMargin(0)
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.pushButton_11 = QtGui.QPushButton(self.widget_12)
self.pushButton_11.setText("")
self.pushButton_11.setObjectName("pushButton_11")
self.verticalLayout_11.addWidget(self.pushButton_11)
self.label_11 = QtGui.QLabel(self.widget_12)
self.label_11.setObjectName("label_11")
self.verticalLayout_11.addWidget(self.label_11)
self.gridLayout.addWidget(self.widget_12, 4, 5, 1, 1)
self.widget_13 = QtGui.QWidget(self.centralwidget)
self.widget_13.setObjectName("widget_13")
self.verticalLayout_12 = QtGui.QVBoxLayout(self.widget_13)
self.verticalLayout_12.setMargin(0)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.pushButton_12 = QtGui.QPushButton(self.widget_13)
self.pushButton_12.setText("")
self.pushButton_12.setObjectName("pushButton_12")
self.verticalLayout_12.addWidget(self.pushButton_12)
self.label_12 = QtGui.QLabel(self.widget_13)
self.label_12.setObjectName("label_12")
self.verticalLayout_12.addWidget(self.label_12)
self.gridLayout.addWidget(self.widget_13, 4, 6, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "EVE Suite", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("MainWindow", "Ship Fitter", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("MainWindow", "Item Browser", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("MainWindow", "Asset Viewer", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Character Viewer", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setText(QtGui.QApplication.translate("MainWindow", "Manufacturing Calculator", None, QtGui.QApplication.UnicodeUTF8))
self.label_9.setText(QtGui.QApplication.translate("MainWindow", "POS Fitter", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setText(QtGui.QApplication.translate("MainWindow", "Mining Calculator", None, QtGui.QApplication.UnicodeUTF8))
self.label_7.setText(QtGui.QApplication.translate("MainWindow", "Refine Calculator", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setText(QtGui.QApplication.translate("MainWindow", "Research Calculator", None, QtGui.QApplication.UnicodeUTF8))
self.label_10.setText(QtGui.QApplication.translate("MainWindow", "Skill Browser", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setText(QtGui.QApplication.translate("MainWindow", "Skill Planner", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setText(QtGui.QApplication.translate("MainWindow", "Travel Planner", None, QtGui.QApplication.UnicodeUTF8))
import rc_main_window_rc

brews/tellervo-python | setup.py | gpl-3.0

"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
setup(
name='tellervo-python',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.0.1',
description='Python client to the Tellervo dendrochronology suite',
# The project's main homepage.
url='https://github.com/brews/tellervo-python',
# Author details
author='S. Brewster Malevich',
author_email='[email protected]',
# Choose your license
license='GPLv3',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Software Development',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='client dendrochronology development',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['lxml'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
}
)

zrathustra/mmap | ggh.py | mit

import random as rand
from sage.all import ZZ
from sage.rings.all import RR, QQ, PolynomialRing, Zmod
from sage.rings.arith import next_prime
from sage.functions.all import log, ceil, sqrt
from sage.misc.misc_c import prod
from sage.modules.free_module_element import vector, zero_vector
from sage.stats.distributions.discrete_gaussian_lattice import DiscreteGaussianDistributionLatticeSampler as DGSL
from mmp import MMP
from util import *
import norms
class GGH(MMP):
@staticmethod
def set_params(lam, k):
        n = pow(2, ceil(log(lam**2 * k)/log(2))) # dim of poly ring: smallest power of 2 >= k*lam^2
q = next_prime(ZZ(2)**(8*k*lam) * n**k, proof=False) # prime modulus
sigma = int(sqrt(lam * n))
sigma_prime = lam * int(n**(1.5))
return (n, q, sigma, sigma_prime, k)
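    # Worked example (illustrative): for lam = 80, k = 6 the ring dimension is
    # n = 2**ceil(log2(80**2 * 6)) = 2**16 = 65536, and q is the next prime
    # above 2**(8*6*80) * 65536**6.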
@profile(LOG, "setup")
def __init__(self, params, asym=False):
(self.n, self.q, sigma, self.sigma_prime, self.k) = params
S, x = PolynomialRing(ZZ, 'x').objgen()
self.R = S.quotient_ring(S.ideal(x**self.n + 1))
Sq = PolynomialRing(Zmod(self.q), 'x')
self.Rq = Sq.quotient_ring(Sq.ideal(x**self.n + 1))
# draw z_is uniformly from Rq and compute its inverse in Rq
if asym:
z = [self.Rq.random_element() for i in range(self.k)]
self.zinv = [z_i**(-1) for z_i in z]
else: # or do symmetric version
z = self.Rq.random_element()
zinv = z**(-1)
z, self.zinv = zip(*[(z,zinv) for i in range(self.k)])
# set up some discrete Gaussians
DGSL_sigma = DGSL(ZZ**self.n, sigma)
self.D_sigma = lambda: self.Rq(list(DGSL_sigma()))
# discrete Gaussian in ZZ^n with stddev sigma_prime, yields random level-0 encodings
DGSL_sigmap_ZZ = DGSL(ZZ**self.n, self.sigma_prime)
self.D_sigmap_ZZ = lambda: self.Rq(list(DGSL_sigmap_ZZ()))
# draw g repeatedly from a Gaussian distribution of Z^n (with param sigma)
# until g^(-1) in QQ[x]/<x^n + 1> is small (< n^2)
Sk = PolynomialRing(QQ, 'x')
K = Sk.quotient_ring(Sk.ideal(x**self.n + 1))
while True:
l = self.D_sigma()
ginv_K = K(mod_near_poly(l, self.q))**(-1)
ginv_size = vector(ginv_K).norm()
if ginv_size < self.n**2:
g = self.Rq(l)
self.ginv = g**(-1)
break
# discrete Gaussian in I = <g>, yields random encodings of 0
short_g = vector(ZZ, mod_near_poly(g,self.q))
DGSL_sigmap_I = DGSL(short_g, self.sigma_prime)
self.D_sigmap_I = lambda: self.Rq(list(DGSL_sigmap_I()))
# compute zero-testing parameter p_zt
# randomly draw h (in Rq) from a discrete Gaussian with param q^(1/2)
self.h = self.Rq(list(DGSL(ZZ**self.n, round(sqrt(self.q)))()))
# create p_zt
self.p_zt = self.ginv * self.h * prod(z)
def encode(self, m, S):
''' encodes a vector m (in Zmod(q)^n) to index set S '''
zinv = prod([self.zinv[i] for i in S])
m = vector(Zmod(self.q),m)
zero = vector(Zmod(self.q),self.D_sigmap_I()) # random encoding of 0
c = self.Rq(list(zero + m))
return c * zinv
def sample(self,S):
# draw an element of Rq from a Gaussian distribution of Z^n (with param sigmaprime)
# then encode at index set S
return self.D_sigmap_ZZ() * prod([self.zinv[i] for i in S])
def zero(self,S):
''' encoding of 0 at index S '''
return self.encode(list(self.D_sigmap_I()), S)
def is_zero(self, c):
w = self.Rq(c) * self.p_zt
return (norms.linf(w,self.q) < ZZ(RR(self.q)**(.75)))
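
if __name__ == '__main__':
    # Minimal smoke test (illustrative sketch; assumes Sage plus the
    # accompanying mmp/util/norms modules from this repo are importable).
    # The tiny lam is only to keep the demo fast, not a secure parameter.
    params = GGH.set_params(lam=4, k=2)
    mmap_instance = GGH(params)
    # encode 0 at the full index set, then check that zero-testing accepts it
    c = mmap_instance.zero(S=range(mmap_instance.k))
    print(mmap_instance.is_zero(c))  # expected: True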

autopkg/grahamgilbert-recipes | Puppetlabs/PuppetAgentProductsURLProvider.py | apache-2.0

#!/usr/bin/python
#
# Copyright 2015 Timothy Sutton, w/ insignificant contributions by Allister Banks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""See docstring for PuppetlabsProductsURLProvider class"""
from __future__ import absolute_import
import re
from distutils.version import LooseVersion
from autopkglib import Processor, ProcessorError, URLGetter
__all__ = ["PuppetAgentProductsURLProvider"]
DL_INDEX = "https://downloads.puppetlabs.com/mac"
DEFAULT_VERSION = "latest"
DEFAULT_PRODUCT_VERSION = "5"
OS_VERSION = "10.12"
class PuppetAgentProductsURLProvider(URLGetter):
"""Extracts a URL for a Puppet Labs item."""
description = __doc__
input_variables = {
"product_version": {
"required": False,
"description":
"Major version of the AIO installer. Either 5 or 6 at "
"present. Defaults to %s" % DEFAULT_PRODUCT_VERSION,
},
"get_version": {
"required": False,
"description":
("Specific version to request. Defaults to '%s', which "
"automatically finds the highest available release version."
% (DEFAULT_VERSION)),
},
"get_os_version": {
"required": False,
"description":
("When fetching the puppet-agent, collection-style pkg, "
"designates OS. Defaults to '%s'. Currently only 10.9 "
"or 10.10 packages are available."
% (OS_VERSION)),
},
}
output_variables = {
"version": {
"description": "Version of the product.",
},
"url": {
"description": "Download URL.",
},
}
def main(self):
"""Return a download URL for a PuppetLabs item"""
download_url = DL_INDEX
os_version = self.env.get("get_os_version", OS_VERSION)
product_version = self.env.get("product_version", DEFAULT_PRODUCT_VERSION)
version_re = r"\d+\.\d+\.\d+" # e.g.: 10.10/PC1/x86_64/puppet-agent-1.2.5-1.osx10.10.dmg
download_url += str("/puppet" + product_version + "/" + os_version + "/x86_64")
re_download = ("href=\"(puppet-agent-(%s)-1.osx(%s).dmg)\"" % (version_re, os_version))
try:
data = self.download(download_url, text=True)
except BaseException as err:
raise ProcessorError(
"Unexpected error retrieving download index: '%s'" % err)
# (dmg, version)
candidates = re.findall(re_download, data)
if not candidates:
raise ProcessorError(
"Unable to parse any products from download index.")
# sort to get the highest version
highest = candidates[0]
if len(candidates) > 1:
for prod in candidates:
if LooseVersion(prod[1]) > LooseVersion(highest[1]):
highest = prod
ver, url = highest[1], "%s/%s" % (download_url, highest[0])
self.env["version"] = ver
self.env["url"] = url
self.output("Found URL %s" % self.env["url"])
if __name__ == "__main__":
PROCESSOR = PuppetAgentProductsURLProvider()
PROCESSOR.execute_shell()

willyrv/ms-PSMC | ms2psmcfa.py | mit

#!/usr/bin/env python
import sys, argparse
parser = argparse.ArgumentParser(description="Converts the output of MS into \
psmcfa format (the input file of psmc)")
parser.add_argument("-s", "--bin_size", type=int, default=100,
help="The equivalent of bin_size in psmc")
parser.add_argument("input_ms_results", help="The file produced by MS")
# Read the input from the command line
args = parser.parse_args()
BinSize = args.bin_size
fname = args.input_ms_results
# Read the file
with open(fname, 'r') as f:
ms_out_text = f.read()
ms_command = ms_out_text[:ms_out_text.index('\n')]
# Compute the total length of the simulated sequence
SeqLength = int(ms_command.split(' -r ')[1].split(' ')[1])
# Compute the number of bins (see PSMC documentation)
nBins = int(SeqLength / BinSize) + (SeqLength%BinSize != 0)
sequences_list = ms_out_text.split('segsites: ')[1:]
count = 0
for seq in sequences_list:
count += 1
(segsites, positions_list) = seq.split('\n')[:2]
segsites = int(segsites)
positions_list = positions_list.split()[1:]
    # Start with a sequence of length nBins in which every bin is marked
    # homozygous ('T'). For each SNP position, compute the bin it falls
    # into and mark that bin heterozygous ('K')
A=['T'] * nBins
for p in positions_list:
pos = int(float(SeqLength) * float(p) / BinSize )
A[pos] = 'K'
sys.stdout.write(">{}\n".format(count))
    # Split the sequence into lines of 60 characters and write them to
    # standard output
for i in range(len(A)):
if i>0 and i%60==0:
sys.stdout.write('\n')
sys.stdout.write(A[i])
sys.stdout.write('\n')
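
# Example invocation (illustrative; bin size and file names are made up):
#   python ms2psmcfa.py -s 100 ms_output.txt > diploid.psmcfa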

sigurdga/nidarholm | accounts/forms.py | agpl-3.0

from django.db import models
from django.contrib.auth.models import User, Group
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from accounts.models import UserProfile
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory, inlineformset_factory
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
class LoginForm(AuthenticationForm):
username = forms.CharField(label=_("Username"), max_length=30, help_text='%s <a href="%s" tabindex="4">%s</a>' % (_("Are you a new user?"), "/accounts/register/", _("Please register")))
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput, help_text='%s <a href="%s" tabindex="5">%s</a>' % (_("No password?"), "/accounts/password/reset/", _("Reset your password")))
class ProfileForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(ProfileForm, self).__init__(*args, **kwargs)
try:
self.fields['username'].initial = self.instance.user.username
self.fields['email'].initial = self.instance.user.email
self.fields['first_name'].initial = self.instance.user.first_name
self.fields['last_name'].initial = self.instance.user.last_name
except User.DoesNotExist:
pass
first_name = forms.CharField(label=_("First name"), help_text="")
last_name = forms.CharField(label=_("Last name"), help_text="")
email = forms.EmailField(label=_("Primary email"), help_text="")
username = forms.CharField(label=_('Username'), max_length=30, help_text="")
class Meta:
model = UserProfile
fields = ('first_name', 'last_name', 'email', 'username', 'cellphone', 'address', 'postcode', 'born', 'personal_website', 'occupation', 'employer', 'employer_website')
def save(self, *args, **kwargs):
"""
Update the primary email address on the related User object as well.
"""
u = self.instance.user
u.email = self.cleaned_data['email']
u.first_name = self.cleaned_data['first_name']
u.last_name = self.cleaned_data['last_name']
u.username = self.cleaned_data['username']
u.save()
profile = super(ProfileForm, self).save(*args, **kwargs)
return profile
class UserGroupsForm(forms.ModelForm):
groups = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
widget=forms.CheckboxSelectMultiple,
required=False)
class Meta:
model = User
fields = ('id', 'groups')

storiesofsolidarity/story-database | stories/serializers.py | agpl-3.0

from django.conf import settings
from rest_framework import serializers
from models import Location, Story
from localflavor.us.us_states import US_STATES
from people.models import Author
from people.serializers import AuthorSerializer
STORY_PREVIEW_MAX_COUNT = 3
STORY_PREVIEW_MAX_LENGTH = 100
# to remove
class LocationSerializer(serializers.ModelSerializer):
city = serializers.CharField(source='city_fmt', allow_blank=True, required=False)
state = serializers.CharField(source='state_fmt', allow_blank=True, required=False)
county = serializers.CharField(source='county_fmt', allow_blank=True, required=False)
class Meta:
model = Location
fields = ('id', 'zipcode', 'city', 'county', 'state', 'lon', 'lat')
class StateStoriesSerializer(serializers.ModelSerializer):
abbr = serializers.CharField(source='location__state')
name = serializers.SerializerMethodField('state_full')
story_count = serializers.IntegerField(read_only=True, source='id__count')
preview = serializers.SerializerMethodField('story_preview')
def state_full(self, obj):
abbr = obj.get('location__state')
if abbr:
return dict(US_STATES)[abbr]
else:
return ""
def story_preview(self, obj):
# returns limited preview of up recent stories in state
state = obj.get('location__state')
stories = (Story.objects.filter(display=True, location__state=state)
.order_by('created_at')[:STORY_PREVIEW_MAX_COUNT])
return [s.content[:STORY_PREVIEW_MAX_LENGTH].replace('\n', '') for s in stories]
class Meta:
model = Location
fields = ('id', 'abbr', 'name', 'story_count', 'preview')
class CountyStoriesSerializer(serializers.ModelSerializer):
name = serializers.CharField(source='location__county')
state = serializers.CharField(source='location__state')
state_name = serializers.SerializerMethodField('state_full')
story_count = serializers.IntegerField(read_only=True, source='id__count')
preview = serializers.SerializerMethodField('story_preview')
def state_full(self, obj):
abbr = obj.get('location__state')
if abbr:
return dict(US_STATES)[abbr]
else:
return ""
def story_preview(self, obj):
# returns limited preview of up recent stories in county
# TODO, limit by state as well?
county = obj.get('location__county')
if county:
stories = (Story.objects.filter(display=True, location__county__startswith=county)
.order_by('created_at')[:STORY_PREVIEW_MAX_COUNT])
return [s.content[:STORY_PREVIEW_MAX_LENGTH].replace('\n', '') for s in stories]
class Meta:
model = Location
fields = ('id', 'name', 'state', 'state_name', 'story_count', 'preview')
class ZipcodeStoriesSerializer(serializers.ModelSerializer):
story_count = serializers.IntegerField(read_only=True, source='id__count')
zipcode = serializers.CharField(source='location__zipcode')
class Meta:
model = Location
fields = ('id', 'zipcode', 'story_count')
class LocationStoriesSerializer(serializers.ModelSerializer):
story_count = serializers.IntegerField(read_only=True)
city = serializers.CharField(source='city_fmt')
state = serializers.CharField(source='state_fmt')
class Meta:
model = Location
fields = ('id', 'zipcode', 'city', 'state', 'lon', 'lat', 'story_count')
class StorySerializer(serializers.ModelSerializer):
author = AuthorSerializer(required=False)
location = LocationSerializer(required=False)
photo = serializers.SerializerMethodField('get_photo_url')
content = serializers.CharField(error_messages={'required': "Share a story before submitting"})
def get_photo_url(self, obj):
if obj.photo and obj.photo.url:
return obj.photo.url
else:
return ''
    # override to_representation to hide the author's name for anonymous authors
def to_representation(self, instance):
data = super(StorySerializer, self).to_representation(instance)
        if data['anonymous'] or data['author']['anonymous']:
            data.pop('author')
        return data
def create(self, validated_data):
"Handles nested data and model lookup or creation for author and location."
initial_data = self.initial_data # instead of validated_data, which won't include non-named fields
name = initial_data.get('name')
author, new_author = Author.objects.get_or_create_user(user__name=name)
validated_data['author'] = author
city = initial_data.get('location.city')
state = initial_data.get('location.state')
if (city and state) or state:
location, new_location = Location.objects.get_or_create(city=city, state=state)
if new_location:
location.geocode('%s, %s' % (city, state))
location.save()
validated_data['location'] = location
else:
# overwrite the empty dict to avoid validation errors
validated_data['location'] = None
# save the photo
if 'photo' in initial_data:
validated_data['photo'] = initial_data['photo']
story = Story.objects.create(**validated_data) # here use validated_data which will include new objects
return story
class Meta:
model = Story
fields = ('id', 'created_at', 'updated_at',
'location', 'content', 'photo', 'author', 'anonymous')

noironetworks/group-based-policy | gbpservice/neutron/db/migration/alembic_migrations/versions/b6e301d5757f_application_policy_group.py | apache-2.0

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""application_policy_group
Revision ID: b6e301d5757f
Revises: daaa11a358a2
Create Date: 2017-02-10 01:15:32.361753
"""
# revision identifiers, used by Alembic.
revision = 'b6e301d5757f'
down_revision = 'daaa11a358a2'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'gp_application_policy_groups',
sa.Column('id', sa.String(36), nullable=False),
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('name', sa.String(length=50), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.Column('status_details', sa.String(length=4096), nullable=True),
sa.PrimaryKeyConstraint('id'))
op.add_column(
'gp_policy_target_groups',
sa.Column('application_policy_group_id', sa.String(length=36),
nullable=True))
op.create_foreign_key('gp_application_policy_group_ibfk_1',
source='gp_policy_target_groups',
referent='gp_application_policy_groups',
local_cols=['application_policy_group_id'],
remote_cols=['id'])
def downgrade():
pass

olof/svtplay-dl | lib/svtplay_dl/service/cmore.py | mit

from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import re
from urllib.parse import urljoin
from urllib.parse import urlparse
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.service import Service
class Cmore(Service):
supported_domains = ["www.cmore.se", "www.cmore.dk", "www.cmore.no", "www.cmore.fi"]
def get(self):
if not self.config.get("username") or not self.config.get("password"):
yield ServiceError("You need username and password to download things from this site.")
return
token, message = self._login()
if not token:
yield ServiceError(message)
return
vid = self._get_vid()
if not vid:
yield ServiceError("Can't find video id")
return
tld = self._gettld()
self.output["id"] = vid
metaurl = "https://playback-api.b17g.net/asset/{}?service=cmore.{}" "&device=browser&drm=widevine&protocol=dash%2Chls".format(
self.output["id"], tld
)
res = self.http.get(metaurl)
janson = res.json()
self._autoname(janson)
if janson["metadata"]["isDrmProtected"]:
yield ServiceError("Can't play this because the video got drm.")
return
url = "https://playback-api.b17g.net/media/{}?service=cmore.{}&device=browser&protocol=hls%2Cdash&drm=widevine".format(self.output["id"], tld)
res = self.http.request("get", url, cookies=self.cookies, headers={"authorization": "Bearer {}".format(token)})
if res.status_code > 200:
yield ServiceError("Can't play this because the video is geoblocked.")
return
if res.json()["playbackItem"]["type"] == "hls":
streams = hlsparse(
self.config,
self.http.request("get", res.json()["playbackItem"]["manifestUrl"]),
res.json()["playbackItem"]["manifestUrl"],
output=self.output,
)
for n in list(streams.keys()):
yield streams[n]
def find_all_episodes(self, config):
episodes = []
token, message = self._login()
if not token:
logging.error(message)
return
res = self.http.get(self.url)
tags = re.findall('<a class="card__link" href="([^"]+)"', res.text)
for i in tags:
url = urljoin("https://www.cmore.{}/".format(self._gettld()), i)
if url not in episodes:
episodes.append(url)
if config.get("all_last") > 0:
return sorted(episodes[-config.get("all_last") :])
return sorted(episodes)
def _gettld(self):
if isinstance(self.url, list):
parse = urlparse(self.url[0])
else:
parse = urlparse(self.url)
return re.search(r"\.(\w{2})$", parse.netloc).group(1)
def _login(self):
tld = self._gettld()
url = "https://www.cmore.{}/login".format(tld)
res = self.http.get(url, cookies=self.cookies)
if self.config.get("cmoreoperator"):
post = {
"username": self.config.get("username"),
"password": self.config.get("password"),
"operator": self.config.get("cmoreoperator"),
"country_code": tld,
}
else:
post = {"username": self.config.get("username"), "password": self.config.get("password")}
res = self.http.post("https://account.cmore.{}/session?client=cmore-web-prod".format(tld), json=post, cookies=self.cookies)
if res.status_code >= 400:
return None, "Wrong username or password"
janson = res.json()
token = janson["data"]["vimond_token"]
return token, None
def operatorlist(self):
res = self.http.get("https://tve.cmore.se/country/{}/operator?client=cmore-web".format(self._gettld()))
for i in res.json()["data"]["operators"]:
print("operator: '{}'".format(i["name"].lower()))
def _get_vid(self):
res = self.http.get(self.url)
match = re.search('data-asset-id="([^"]+)"', res.text)
if match:
return match.group(1)
parse = urlparse(self.url)
match = re.search(r"/(\d+)-[\w-]+$", parse.path)
if match:
return match.group(1)
return None
def _autoname(self, janson):
if "seriesTitle" in janson["metadata"]:
self.output["title"] = janson["metadata"]["seriesTitle"]
self.output["episodename"] = janson["metadata"]["episodeTitle"]
else:
self.output["title"] = janson["metadata"]["title"]
self.output["season"] = janson["metadata"]["seasonNumber"]
self.output["episode"] = janson["metadata"]["episodeNumber"]
self.config.set("live", janson["metadata"]["isLive"])

jacoblevine/PhenoGraph | phenograph/bruteforce_nn.py | mit

"""
Compute k-nearest neighbors using brute force search in parallel
via scipy.spatial.distance.cdist and multiprocessing.Pool
psutil is used to evaluate available memory and minimize the number
of parallel jobs for the available resources
"""
import numpy as np
from scipy.spatial.distance import cdist
from multiprocessing import Pool
from contextlib import closing
from functools import partial
import psutil
def process_chunk(chunk, data, k, metric):
d = cdist(chunk, data, metric=metric).astype('float32')
p = np.argpartition(d, k).astype('int32')[:, :k]
rows = np.arange(chunk.shape[0])[:, None]
d = d[rows, p]
i = np.argsort(d)
return d[rows, i], p[rows, i]
def determine_n_chunks(n, k):
"""Assuming 32 bit representations for distances and indices"""
# available memory
available = psutil.virtual_memory().available
# memory needed to store final knn data (d, idx)
final = 2 * (n * k * 32) / 8
# total memory usable for subprocesses
usable = available - final
# usable per subprocess
usable_per_subprocess = usable / psutil.cpu_count()
# chunk size - number of n-dimensional distance arrays that can be held in memory by each subprocess simultaneously
chunk_size = usable_per_subprocess // (n * 32)
return int(n // chunk_size)
def knnsearch(data, k, metric):
"""k-nearest neighbor search via parallelized brute force
Parameters
----------
data : ndarray
n observations in d dimensions
k : int
number of neighbors (including self)
metric : str
see cdist documentation http://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.cdist.html
Returns
-------
d : ndarray
distances to k nearest neighbors
idx : ndarray
indices of k nearest neighbors
Notes
-----
This implementation uses np.array_split to pass the data to subprocesses. This uses views and does not copy the data
in the subprocesses
"""
f = partial(process_chunk, **{'data': data, 'k': k, 'metric': metric})
n_chunks = determine_n_chunks(len(data), k)
if n_chunks > 2:
with closing(Pool()) as pool:
result = pool.map(f, np.array_split(data, n_chunks))
d, idx = zip(*result)
d, idx = np.vstack(d), np.vstack(idx)
else:
d, idx = process_chunk(data, data, k, metric)
return d, idx
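
if __name__ == '__main__':
    # Minimal usage sketch (synthetic data, not part of the original module):
    # find each point's 5 nearest neighbors under the euclidean metric.
    pts = np.random.rand(1000, 10).astype('float32')
    d, idx = knnsearch(pts, k=5, metric='euclidean')
    print(d.shape, idx.shape)  # both (1000, 5)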

bitglue/shinysdr | shinysdr/plugins/ghpsdr.py | gpl-3.0

# Copyright 2014, 2015, 2016 Kevin Reid <[email protected]>
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
"""
This is an adapter to allow ghpsdr3-alex clients such as "glSDR" for
Android to connect to ShinySDR as a "dspserver"; references:
<http://openhpsdr.org/wiki/index.php?title=Ghpsdr3_protocols>.
<https://github.com/alexlee188/ghpsdr3-alex/tree/master/trunk/src/dspserver/client.c>
<https://github.com/alexlee188/ghpsdr3-alex/tree/master/trunk/src/dspserver/audiostream.c>
DOES NOT YET WORK: some messages we send have the wrong length as judged
by the glSDR client, resulting in the following messages being
misparsed. No success yet in figuring out where the discrepancy is.
Patches welcome.
"""
from __future__ import absolute_import, division
import array
import struct
from twisted.application.service import Service
from twisted.internet import defer
from twisted.internet import endpoints
from twisted.internet import protocol
from twisted.internet import task
from twisted.python import log
from gnuradio import gr
from shinysdr.twisted_ext import FactoryWithArgs
__all__ = ['DspserverService']
_CLIENT_MSG_LENGTH = 64
def _cmd_noop(self, argstr):
"""stub command implementation"""
pass
def _cmd_setFPS(self, argstr):
width, rate = [int(x) for x in argstr.split(' ')]
self._req_width = width
self._top.monitor.set_freq_resolution(width)
self._top.monitor.set_frame_rate(rate)
self._poller.start(1.0 / (rate * 2.0))
self._top.monitor.set_paused(False)
def _cmd_setFrequency(self, argstr):
pass
# TODO: reenable this
# freq = int(argstr)
# self._get_receiver().set_rec_freq(freq)
# self._top.source.set_freq(freq)
_dspserver_commands = {
'q-master': _cmd_noop, # don't know what this means
'setFPS': _cmd_setFPS,
'setFrequency': _cmd_setFrequency,
}
class _DspserverProtocol(protocol.Protocol):
def __init__(self, top):
self._top = top
self._req_width = None
self.__msgbuf = ''
self._poller = task.LoopingCall(self.__poll)
self.__splitter = top.monitor.state()['fft'].subscribe_to_stream()
self.__audio_queue = gr.msg_queue(limit=100)
self.__audio_buffer = ''
self._top.add_audio_queue(self.__audio_queue, 8000)
def dataReceived(self, data):
"""twisted Protocol implementation"""
self.__msgbuf += data
while len(self.__msgbuf) >= _CLIENT_MSG_LENGTH:
# TODO: efficient buffering
msg = self.__msgbuf[:_CLIENT_MSG_LENGTH]
self.__msgbuf = self.__msgbuf[_CLIENT_MSG_LENGTH:]
self.__messageReceived(msg)
def _get_receiver(self):
receiver_cells = self._top.receivers.state().values()
if len(receiver_cells) > 0:
receiver = receiver_cells[0].get()
else:
_, receiver = self._top.add_receiver('AM')
return receiver
def __messageReceived(self, data):
null = data.find('\0')
if null > -1:
data = data[:null]
print 'Message received: ' + data
sep = data.find(' ')
if sep > -1:
cmd = data[0:sep]
argstr = data[sep + 1:]
else:
cmd = data
argstr = ''
impl = _dspserver_commands.get(cmd)
if impl is not None:
impl(self, argstr)
def connectionLost(self, reason):
# pylint: disable=signature-differs
self._top.remove_audio_queue(self.__audio_queue)
self._poller.stop()
self.__splitter.close()
def __poll(self):
receiver = self._get_receiver()
while True:
frame = self.__splitter.get()
if frame is None:
break
((freq, sample_rate), fft) = frame
if self._req_width is None:
break
print 'Sending frame', self._req_width, sample_rate # TODO: Remove debugging
msg = struct.pack('BBBHHHIh' + str(self._req_width) + 's',
0,
2,
1,
self._req_width, # short
0, # meter
0, # subrx meter
sample_rate,
receiver.get_rec_freq() - freq, # lo_offset
''.join([chr(int(max(1, min(255, -(x - 20))))) for x in fft]))
self.transport.write(msg)
# audio
aqueue = self.__audio_queue
while not aqueue.empty_p():
# pylint: disable=no-member
grmessage = aqueue.delete_head()
self.__audio_buffer += grmessage.to_string()
size_in_bytes = 2000 * 4
if len(self.__audio_buffer) > size_in_bytes:
abuf = self.__audio_buffer[:size_in_bytes]
self.__audio_buffer = self.__audio_buffer[size_in_bytes:]
print 'Sending audio', len(abuf) # TODO: Remove debugging
unpacker = array.array('f')
unpacker.fromstring(abuf)
nsamples = len(unpacker)
msg = struct.pack('BBBH' + str(nsamples) + 'B',
1,
2,
1,
nsamples,
# TODO tweak
*[int(max(0, min(255, x * 127 + 127))) for x in unpacker.tolist()])
# TODO: Disabled until we fix fft messages
# self.transport.write(msg)
class DspserverService(Service):
def __init__(self, reactor, top, endpoint_string):
self.__top = top
self.__endpoint = endpoints.serverFromString(reactor, endpoint_string)
self.__port_obj = None
@defer.inlineCallbacks
def startService(self):
self.__port_obj = yield self.__endpoint.listen(
FactoryWithArgs.forProtocol(_DspserverProtocol, self.__top))
def stopService(self):
return self.__port_obj.stopListening()
def announce(self, open_client):
"""interface used by shinysdr.main"""
log.msg('GHPSDR-compatible server at port %s' % self.__port_obj.getHost().port)

idaholab/raven | framework/MessageHandler.py | apache-2.0

# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Apr 20, 2015
@author: talbpaul
"""
import sys
import time
import bisect
import builtins
from utils import utils
_starttime = time.time()
#custom exceptions
class NoMoreSamplesNeeded(GeneratorExit):
"""
Custom exception class for no more samples
"""
pass
"""
HOW THIS MODULE WORKS
The intention is for a single instance of the MessageHandler class to exist in any simulation.
Currently, that instance is created in the Simulation initialization and propagated through
all the RAVEN objects. This usually happens by passing it to BaseClass.readXML, but for
objects that don't inherit from BaseClass, the messageHandler instance should be passed
and set via instantiation or initialization. The appropriate class member to point at the
messageHandler instance reference is "self.messageHandler," for reasons that will be made clear
with the BaseClasses.MessageUser superclass.
While an object can access the messageHandler to raise messages and errors, for convenience
RAVEN provides the MessageUser superclass, which BaseType and (almost?) all other Raven objects
inherit from. This provides simplistic hooks for a developer to raise an error or message
with the standard message priorities, as
self.raiseAnError(IOError, 'Input value is invalid:', value)
There are currently 4 verbosity levels/message priorities. They are:
- silent: only errors are displayed
- quiet : errors and warnings are displayed
- all : (default) errors, warnings, and messages are displayed
- debug : errors, warnings, messages, and debug messages are displayed
The developer can change the priority level of their raised messages through the 'verbosity'
keyword. For example,
self.raiseAMessage('Hello, World', verbosity='silent')
will be printed along with errors if the simulation verbosity is set to 'silent', as well as
all other levels.
TL;DR: BaseClasses/MessageUser is a superclass that gives access to hooks to the simulation's MessageHandler
instance, while the MessageHandler is an output stream control tool.
In an effort to make the MH more flexible, we insert getMessageHandler into the python "builtins" module.
This means that any time after this module (MessageHandler) is imported, you can use
"getMessageHandler(name='default')" to retrieve a particular message handler as identified by "name".
"""
class MessageHandler(object):
"""
Class for handling messages, warnings, and errors in RAVEN. One instance of this
class should be created at the start of the Simulation and propagated through
the readMoreXML function of the BaseClass, and initialization of other classes.
"""
def __init__(self):
"""
Init of class
@ In, None
@ Out, None
"""
self.starttime = _starttime
self.printTag = 'MESSAGE HANDLER'
self.verbosity = 'all'
self.callerLength = 25
self.tagLength = 15
self.suppressErrs = False
self.printTime = True
self.inColor = False
self.verbCode = {'silent':0, 'quiet':1, 'all':2, 'debug':3}
self.colorDict = {'debug':'yellow', 'message':'neutral', 'warning':'magenta', 'error':'red'}
self.colors={
'neutral' : '\033[0m',
'red' : '\033[31m',
'green' : '\033[32m',
'yellow' : '\033[33m',
'blue' : '\033[34m',
'magenta' : '\033[35m',
'cyan' : '\033[36m'}
self.warnings = [] #collection of warnings that were raised during this run
self.warningCount = [] #count of the collections of warning above
def initialize(self, initDict):
"""
Initializes basic instance attributes
@ In, initDict, dict, dictionary of global options
@ Out, None
"""
self.verbosity = initDict.get('verbosity','all').lower()
self.callerLength = initDict.get('callerLength',25)
self.tagLength = initDict.get('tagLength',15)
self.suppressErrs = utils.stringIsTrue(initDict.get('suppressErrs', 'False'))
def printWarnings(self):
"""
Prints a summary of warnings collected during the run.
@ In, None
@ Out, None
"""
if len(self.warnings)>0:
if self.verbCode[self.verbosity]>0:
print('-'*50)
print('There were %i warnings during the simulation run:' %sum(self.warningCount))
for w,warning in enumerate(self.warnings):
count = self.warningCount[w]
time = 'time'
if count > 1:
time += 's'
print('(%i %s) %s' %(self.warningCount[w],time,warning))
print('-'*50)
else:
print('There were %i warnings during the simulation run.' %sum(self.warningCount))
def paint(self, str, color):
"""
Formats string with color
@ In, str, string, string
@ In, color, string, color name
@ Out, paint, string, formatted string
"""
if color.lower() not in self.colors.keys():
self.message(self,'Requested color %s not recognized! Skipping...' %color,'Warning','quiet')
return str
return self.colors[color.lower()]+str+self.colors['neutral']
def setTimePrint(self, msg):
"""
Allows the code to toggle timestamp printing.
@ In, msg, string, the string that means true or false
@ Out, None
"""
if utils.stringIsTrue(msg):
self.callerLength = 40
self.tagLength = 30
self.printTime = True
elif utils.stringIsFalse(msg):
self.callerLength = 25
self.tagLength = 15
self.printTime = False
def setColor(self, inColor):
"""
Allows output to screen to be colorized.
@ In, inColor, string, boolean value
@ Out, None
"""
if utils.stringIsTrue(inColor):
self.inColor = True
def getStringFromCaller(self, obj):
"""
Determines the appropriate print string from an object
@ In, obj, instance, preferably an object with a printTag method; otherwise, a string or an object
@ Out, tag, string, string to print
"""
if type(obj).__name__ in ['str','unicode']: # ?when is this ever not true?
return obj
if hasattr(obj,'printTag'):
tag = str(obj.printTag)
else:
tag = str(obj)
return tag
def getDesiredVerbosity(self, caller):
"""
Tries to use local verbosity; otherwise uses global
@ In, caller, instance, the object desiring to print
@ Out, desVerbosity, int, integer equivalent to verbosity level
"""
if hasattr(caller, 'getVerbosity'):
localVerb = caller.getVerbosity()
else:
localVerb = None
if localVerb is None:
localVerb = self.verbosity
desVerbosity = self.checkVerbosity(localVerb)
return desVerbosity
def checkVerbosity(self, verb):
"""
Converts English-readable verbosity to computer-legible integer
@ In, verb, string, the string verbosity equivalent
@ Out, currentVerb, int, integer equivalent to verbosity level
"""
if str(verb).strip().lower() not in self.verbCode.keys():
raise IOError(f'Verbosity key {verb} not recognized! Options are {list(self.verbCode.keys())}')
currentVerb = self.verbCode[str(verb).strip().lower()]
return currentVerb
def error(self, caller, etype, message, tag='ERROR', verbosity='silent', color=None):
"""
Raise an error message, unless errors are suppressed.
@ In, caller, object, the entity desiring to print a message
@ In, etype, Error, the type of error to throw
@ In, message, string, the message to print
@ In, tag, string, optional, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
@ In, verbosity, string, optional, the print priority of the message
@ In, color, string, optional, color to apply to message
@ Out, None
"""
verbval = max(self.getDesiredVerbosity(caller),self.checkVerbosity(self.verbosity))
self.message(caller,message,tag,verbosity,color=color)
if not self.suppressErrs:
self.printWarnings()
# debug mode gets full traceback, others quieted
if verbval<3:
#all, quiet, silent
sys.tracebacklimit=0
raise etype(message)
def message(self, caller, message, tag, verbosity, color=None, writeTo=sys.stdout, forcePrint=False):
"""
Print a message
@ In, caller, object, the entity desiring to print a message
@ In, message, string, the message to print
@ In, tag, string, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
@ In, verbosity, string, the print priority of the message
@ In, color, string, optional, color to apply to message
@ In, forcePrint, bool, optional, force the print independetly on the verbosity level? Defaul False
@ Out, None
"""
verbval = self.checkVerbosity(verbosity)
okay, msg = self._printMessage(caller, message, tag, verbval, color, forcePrint)
if tag.lower().strip() == 'warning':
self.addWarning(message)
if okay:
print(msg,file=writeTo)
sys.stdout.flush()
def addWarning(self, msg):
"""
Stores warnings so that they can be reported in summary later.
@ In, msg, string, only the main part of the message, used to determine uniqueness
@ Out, None
"""
index = bisect.bisect_left(self.warnings,msg)
if len(self.warnings) == 0 or index == len(self.warnings) or self.warnings[index] != msg:
self.warnings.insert(index,msg)
self.warningCount.insert(index,1)
else:
self.warningCount[index] += 1
def _printMessage(self, caller, message, tag, verbval, color=None, forcePrint=False):
"""
Checks verbosity to determine whether something should be printed, and formats message
@ In, caller , object, the entity desiring to print a message
@ In, message, string, the message to print
@ In, tag , string, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
@ In, verbval, int , the print priority of the message
@ In, color, string, optional, color to apply to message
@ In, forcePrint, bool, optional, force the print independetly on the verbosity level? Defaul False
@ Out, (shouldIPrint,msg), tuple, shouldIPrint -> bool, indication if the print should be allowed
msg -> string, the formatted message
"""
#allows raising standardized messages
shouldIPrint = False
desired = self.getDesiredVerbosity(caller)
if verbval <= desired or forcePrint:
shouldIPrint=True
if not shouldIPrint:
return False,''
ctag = self.getStringFromCaller(caller)
msg=self.stdMessage(ctag,tag,message,color)
return shouldIPrint,msg
def stdMessage(self, pre, tag, post, color=None):
"""
Formats string for pretty printing
@ In, pre , string, who is printing the message
@ In, tag , string, the type of message being printed (Error, Warning, Message, Debug, FIXME, etc)
@ In, post , string, the actual message body
@ In, color, string, optional, color to apply to message
@ Out, msg, string, formatted message
"""
msg = ''
if self.printTime:
curtime = time.time()-self.starttime
msg+='('+'{:8.2f}'.format(curtime)+' sec) '
if self.inColor:
msg = self.paint(msg,'cyan')
msgend = pre.ljust(self.callerLength)[0:self.callerLength] + ': '+tag.ljust(self.tagLength)[0:self.tagLength]+' -> ' + post
if self.inColor:
if color is not None:
#overrides other options
msgend = self.paint(msgend,color)
elif tag.lower() in self.colorDict.keys():
msgend = self.paint(msgend,self.colorDict[tag.lower()])
msg+=msgend
return msg
def timePrint(message):
"""
Prints the time since start then the message
@ In, message, string
@ Out, None
"""
curtime = time.time()-_starttime
msg = ''
msg+='('+'{:8.2f}'.format(curtime)+' sec) '
print(msg + message)
_handlers = {}
def makeHandler(name):
"""
Instantiate and register new instance of message handler
@ In, name, str, identifying name for new handler
@ Out, makeHandler, MessageHandler, instance
"""
handler = MessageHandler()
_handlers[name] = handler
return handler
# default handler
makeHandler('default')
def getHandler(name='default'):
"""
Retrieve a message handling instance.
Styled after the Python logging module, maybe we should be switching to that.
@ In, name, str, optional, identifying name of handler to return
@ Out, getHandler, MessageHandler, instance (created if not existing)
"""
h = _handlers.get(name, None)
if h is None:
h = makeHandler(name)
# NOTE: idk why, but h = _handlers.get(name, makeHandler(name)) does not work.
# I think it's because it executes makeHandler(name) regardless of if name is present or not.
return h
builtins.getMessageHandler = getHandler

ogonbat/django-shorty | shorty/admins.py | lgpl-3.0

from shorty.utils import url_encode
from django.contrib import admin
__author__ = 'cingusoft'
#admin section
class UrlAdmin(admin.ModelAdmin):
date_hierarchy = 'created'
list_display = ('url_field','show_slug','user','status','is_protected','created')
list_display_links = ('url_field',)
#list_editable = ('url_field',)
list_filter = ('status',)
search_fields = ('url_field','status')
fieldsets = (
('General',{
'fields':('url_field','user','status')
}),
('Advanced options',{
'classes': ('collapse',),
'fields':('private','private_password')
})
)
actions = ['ban_this_link','active_this_link','refuse_this_link','pending_this_link']
#actions
def ban_this_link(self,request,queryset):
queryset.update(status='Banned')
ban_this_link.short_description = 'Ban selected links'
#actions
def active_this_link(self,request,queryset):
queryset.update(status='Active')
active_this_link.short_description = 'Active selected links'
#actions
def refuse_this_link(self,request,queryset):
queryset.update(status='Refused')
refuse_this_link.short_description = 'Refuse selected links'
#actions
def pending_this_link(self,request,queryset):
queryset.update(status='Pending')
pending_this_link.short_description = 'Move selected links to Pending'
#property
def show_slug(self,obj):
if obj.personal:
#the link have a personal slug
return obj.personal_slug
else:
return url_encode(obj.id)
show_slug.short_description = "Slug"
#property
def is_protected(self,obj):
if obj.private:
#the link have a personal slug
return "yes"
else:
return "no"
is_protected.short_description = "Is Protected" | lgpl-3.0 | 6,498,760,609,118,806,000 | 30.327869 | 89 | 0.610995 | false | 3.946281 | false | false | false |
ratnania/pigasus | python/multigrid/multilevel.py | 1 | 12450 | # -*- coding: UTF-8 -*-
"""Generic GMG solver"""
__docformat__ = "restructuredtext en"
from warnings import warn
import scipy
import numpy as np
# petsc4py is only needed for the withPETSc=True code paths below; this
# guarded import is an assumption about where PETSc comes from.
try:
    from petsc4py import PETSc
except ImportError:  # pragma: no cover
    PETSc = None
__all__ = ['multilevel_solver']
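# ``residual_norm`` is used by level/multilevel_solver below but was not
# defined in this module; a minimal sketch, assuming the Euclidean norm of
# the residual r = b - A.x is what is intended:
def residual_norm(A, x, b):
    """2-norm of the residual b - A.x (A may be a scipy sparse matrix)."""
    return np.linalg.norm(b - A.dot(x))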
class level:
"""Stores one level of the multigrid hierarchy
All level objects will have an 'A' attribute referencing the matrix
of that level. All levels, except for the coarsest level, will
also have 'P' and 'R' attributes referencing the prolongation and
restriction operators that act between each level and the next
coarser level.
Attributes
----------
A :
Problem matrix for Ax=b
R : reduction
Restriction matrix between levels (often R = P.T)
P : interpolator
Prolongation or Interpolation matrix.
Notes
-----
    This class is essentially a plain data container (a struct).
"""
def __init__(self, withPETSc=False):
from pigasus.fem.matrix import matrix
self.withPETSc = withPETSc
self.R = None
self.P = None
self.A = matrix()
        if self.withPETSc: # PETSc KSP solvers are attached later
            self.ksp_slv = None
            self.ksp_smt = None
else:
from pigasus.solver.solver import solver
from pigasus.fem.constants import SPM_SOLVER_BASIC_CG, SPM_SOLVER_BASIC_GS
self.slv = solver(matrix=self.A, solver=SPM_SOLVER_BASIC_CG)
self.smt = solver(matrix=self.A, solver=SPM_SOLVER_BASIC_GS)
def set_P(self, P):
"""
"""
self.P = P
def set_R(self, R):
"""
"""
self.R = R
def set_A(self, A):
"""
"""
self.A.set(A)
def construct(self):
"""
construct the current level, operators and the coarse matrix
"""
self.P.construct()
self.R.construct()
def solve(self, b, maxiter=6000, tol=1.e-10):
if self.withPETSc:
_b = PETSc.Vec().createWithArray(b, comm=PETSc.COMM_SELF)
_x = PETSc.Vec().createWithArray(np.zeros_like(b), comm=PETSc.COMM_SELF)
self.ksp_slv.rtol = tol
# self.ksp_slv.setConvergenceHistory()
self.ksp_slv.solve(_b, _x)
return _x.getArray()
else:
return self.slv.solve(b, guess=np.zeros_like(b) \
, maxiter=maxiter, eps=tol)
def smoother(self, x, b, iterations=100, tol=1.e-10):
if self.withPETSc:
_b = PETSc.Vec().createWithArray(b, comm=PETSc.COMM_SELF)
_x = PETSc.Vec().createWithArray(np.zeros_like(b), comm=PETSc.COMM_SELF)
self.ksp_smt.rtol = tol
            self.ksp_smt.max_it = iterations
# self.ksp_smt.setConvergenceHistory()
self.ksp_smt.solve(_b, _x)
return _x.getArray()
else:
return self.smt.solve(b, guess=np.zeros_like(b) \
, maxiter=iterations, eps=tol)
class multilevel_solver:
"""Stores multigrid hierarchy and implements the multigrid cycle
The class constructs the cycling process and points to the methods for
coarse grid solves. A call to multilevel_solver.solve() is a typical access point.
The class also defines methods for constructing operator, cycle, and grid complexities.
Attributes
----------
levels : level array
Array of level objects that contain A, R, and P.
coarse_solver : string
String passed to coarse_grid_solver indicating the solve type
Methods
-------
cycle_complexity()
A measure of the cost of a single multigrid cycle.
grid_complexity()
A measure of the rate of coarsening.
operator_complexity()
A measure of the size of the multigrid hierarchy.
solve()
Iteratively solves a linear system for the right hand side.
"""
def __init__(self, list_geometry, gamma, nu1, nu2, withPETSc=False):
"""Creates a geometric multigrid for the matrix list_A[-1]
Parameters
----------
list_A : is a list of csr_matrix or pigasus-matrix. list_A[0] is on the finest grid
list_geometry : list of geometries [-1] -> the finest geometry and [0] -> the coarse
nlevels : the number of subdomains levels
Returns
-------
mg : the geometric multigrid
Examples
--------
>>> from scipy.sparse import csr_matrix
>>> from pigasus.gallery import poisson
See Also
--------
TODO
"""
# TODO : for the moment we only treate 1-patch geometries
self.withPETSc = withPETSc
self.geometries = list_geometry
self.dim = self.geometries[0].dim
self.nlevels = len(self.geometries)-1
self.gamma = gamma
self.nu1 = nu1
self.nu2 = nu2
self.nloop = 1
self.levels = []
for i in range(0, len(self.geometries)):
self.levels.append(level())
self.list_allresiduals = [] # list of residuals for each step
self.list_coarseresiduals = []
#-----------------------------------
#-----------------------------------
def initialize(self, list_A):
from scipy.sparse import identity as Id
from pigasus.multigrid.operators import interpolation, reduction
self.A = list_A[-1]
self.list_A = list_A
n,m = self.A.shape
ilvl = 0
lvl = self.levels[ilvl]
        lvl.set_A(self.A)
        lvl.set_P(Id(n))
        lvl.set_R(Id(n))
geometries = self.geometries[::-1]
list_A = self.list_A[::-1]
for (geo_h, geo_H) in zip(geometries[:-1], geometries[1:]):
ilvl += 1
lvl = self.levels[ilvl]
# ... interpolator
P = interpolation(geo_H, geo_h)
# ... restriction
R = reduction(geo_H, geo_h)
# ... the coarse system
            try:
                A_H = list_A[ilvl].get()
            except Exception:
                print("Galerkin coarse grid operator has been initialized")
                # NOTE: coarse_matrix and self.DirFaces are assumed to be
                # provided by the surrounding pigasus package / caller.
                A_H = coarse_matrix(geo_H, geo_h, DirFaces=self.DirFaces)
A_h = self.levels[ilvl-1].A.get()
A_H.construct(A_h)
# print A_h.shape, A_H.shape
# A_H = A_H.tocsr()
            lvl.set_P(P)
            lvl.set_R(R)
            lvl.set_A(A_H)
self.levels = self.levels[::-1]
#-----------------------------------
#-----------------------------------
def interpolation(self, level, vH):
P = self.levels[level].P
vh = P.apply(vH)
return vh
#-----------------------------------
#-----------------------------------
def restriction(self, level, vh):
R = self.levels[level].R
vH = R.apply(vh)
return vH
#-----------------------------------
#-----------------------------------
def mgcyc(self, k, gamma, ukm, fk, nu1, nu2 \
, smoother=None, coarse_solver=None):
"""
        this routine will return uk_{m+1} using ukm
"""
        if smoother is None:
            smoother = getattr(self, 'smoother', None)
        if coarse_solver is None:
            coarse_solver = getattr(self, 'coarse_solver', None)
nlevels = self.nlevels + 1
lvl = self.levels[::-1][nlevels-k]
lvl1 = self.levels[::-1][nlevels-k-1]
Rk = lvl.R
Pk = lvl.P
Lk = lvl1.A
Lk1 = lvl.A
# ----------------------------
# presmoothing
# ----------------------------
ukm_s = lvl1.smoother(ukm, fk, nu1)
# ukm_s = smoother(nu1, ukm, Lk, fk)
# ----------------------------
# ----------------------------
# coarse grid correction
# ----------------------------
# Compute the defect
dkm = fk - Lk.dot(ukm_s)
# Restrict the defect
dk1m = Rk.dot(dkm)
# Compute an approximate solution vk1m of the defect equation on Omega_{k-1}
# if k = 1, use a direct or fast iterative solver, by calling
if k == 1:
# TODO : enlever le guess
guess = np.zeros_like(dk1m)
vk1m = lvl.solve(dk1m)
# vk1m = coarse_solver(Lk1, guess, dk1m)
if k > 1:
a = np.zeros_like(dk1m)
vk1m_ = dk1m
for i in range(0, gamma):
dk1m_ = vk1m_
vk1m_, err_ = self.mgcyc(k-1, gamma, a, dk1m_, nu1, nu2 \
, smoother=smoother \
, coarse_solver=coarse_solver)
vk1m = vk1m_
# Interpolate the correction
# print "vk1m : ", vk1m.__class__.__name__, vk1m.shape
# print "Pk : ", Pk.__class__.__name__, Pk.shape
vkm = Pk.dot(vk1m)
# Compute the corrected approximation
ukm += vkm
# ----------------------------
# ----------------------------
# postsmoothing
# ----------------------------
ukp1m = lvl1.smoother(ukm, fk, nu2)
# ukp1m = smoother(nu2, ukm, Lk, fk)
# ----------------------------
err = residual_norm(Lk, ukp1m, fk)
return ukp1m, err
#-----------------------------------
def solve(self, b, x0=None, tol=1e-5, maxiter=100, cycle='V', residuals=None):
"""Main solution call to execute multigrid cycling.
Parameters
----------
b : array
Right hand side.
x0 : array
Initial guess.
tol : float
Stopping criteria: relative residual r[k]/r[0] tolerance.
maxiter : int
Stopping criteria: maximum number of allowable iterations.
cycle : {'V','W','F'}
Type of multigrid cycle to perform in each iteration.
residuals : list
List to contain residual norms at each iteration.
Returns
-------
x : array
Approximate solution to Ax=b
See Also
--------
aspreconditioner
Examples
--------
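        >>> # Illustrative only -- assumes an initialized solver `mg`:
        >>> # residuals = []
        >>> # x = mg.solve(b, tol=1e-8, maxiter=50, residuals=residuals)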
"""
if x0 is None:
x = np.zeros_like(b)
else:
x = np.array(x0) # copy
# Scale tol by normb
# normb = norm(b)
# if normb != 0:
# tol = tol * normb
residuals.append(residual_norm(self.A, x, b))
self.first_pass = True
self.nloop = 0
while len(residuals) <= maxiter and residuals[-1] > tol:
x, err = self.mgcyc(self.nlevels, self.gamma, x, b, self.nu1, self.nu2)
residuals.append(err)
self.first_pass = False
self.nloop += 1
return x
#-----------------------------------
def __repr__(self):
"""Prints basic statistics about the multigrid hierarchy.
"""
from pyamg.util.linalg import condest
levels = self.levels[::-1]
output = 'multilevel_solver\n'
        output += 'Condition number of the matrix: %g\n' % condest(self.A)
output += 'Number of Levels: %d\n' % len(levels)
output += 'Operator Complexity: %6.3f\n' % self.operator_complexity()
output += 'Grid Complexity: %6.3f\n' % self.grid_complexity()
# output += 'Coarse Solver: %s\n' % self.coarse_solver.name()
total_nnz = sum([level.A.nnz for level in levels])
output += ' level unknowns nonzeros\n'
for n, level in enumerate(levels):
A = level.A
output += ' %2d %10d %10d [%5.2f%%]\n' %\
(n, A.shape[1], A.nnz,\
(100 * float(A.nnz) / float(total_nnz)))
return output
def operator_complexity(self):
"""Operator complexity of this multigrid hierarchy
Defined as:
Number of nonzeros in the matrix on all levels /
Number of nonzeros in the matrix on the finest level
"""
levels = self.levels[::-1]
return sum([level.A.nnz for level in levels]) /\
float(levels[0].A.nnz)
def grid_complexity(self):
"""Grid complexity of this multigrid hierarchy
Defined as:
Number of unknowns on all levels /
Number of unknowns on the finest level
"""
levels = self.levels[::-1]
return sum([level.A.shape[0] for level in levels]) /\
float(levels[0].A.shape[0])
| mit | 5,843,988,426,219,080,000 | 29.589681 | 92 | 0.502892 | false | 3.720861 | false | false | false |
Lyrositor/moul-scripts | Python/ki/xMarkerBrainUser.py | 1 | 6335 | # -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email [email protected]
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
from Plasma import *
class UCMarkerGame(object):
def __init__(self, markerNode):
assert isinstance(markerNode, ptVaultMarkerGameNode)
self._node = markerNode
self._RefreshMarkersFromNode()
self._editMode = False
self._showingMarkers = False
self._playing = False
def AddMarker(self, age, pos, desc):
""""Adds a new marker to this game"""
idx = self._nextMarkerID
self._nextMarkerID += 1
self._markers.append((idx, age, pos, desc))
if self._showingMarkers and age.lower() == PtGetAgeName().lower():
ptMarkerMgr().addMarker(pos, idx, True)
return idx
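    # Markers are stored as 4-tuples (idx, age, pos, desc); see the unpacking
    # in BeginEditingMarkers() below.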
def BeginEditingMarkers(self):
"""Displays all markers for editing"""
self._RefreshMarkersFromNode()
curAge = PtGetAgeName().lower()
mgr = self._ResetMarkerMgr()
self._editMode = True
self._showingMarkers = True
for idx, age, pos, desc in self.markers:
if curAge == age.lower():
mgr.addMarker(pos, idx, False)
def DeleteMarker(self, idx):
for i, marker in enumerate(self._markers):
if marker[0] == idx:
if self.selected_marker_id == idx:
self.selected_marker_id = -1
self._markers.pop(i)
return
raise KeyError(idx)
@property
def edit_mode(self):
return self._editMode
def FinishEditingMarkers(self):
"""Hides all markers and commits edits back to the vault node"""
self._editMode = False
self._ResetMarkerMgr()
self._node.setMarkers(self._markers)
self._node.save()
@property
def game_id(self):
return str(self._node.getID())
@property
def game_name(self):
return self._node.getGameName()
@property
def marker_total(self):
return len(self._markers)
@property
def markers(self):
return self._markers
@property
def markers_visible(self):
return self._showingMarkers
def Play(self):
self._playing = True
self._showingMarkers = True
self._RefreshMarkersFromNode()
@property
def playing(self):
return self._playing
def _RefreshMarkersFromNode(self):
# We hold a local copy of the markers so that we don't have to worry if the game is updated
# while we're in the middle of playing it.
self._markers = self._node.getMarkers()
# This will hold the next marker ID. Will be useful for adding new markers
if self._markers:
self._nextMarkerID = max(self._markers, key=lambda x: x[0])[0] + 1
else:
self._nextMarkerID = 0
def _ResetMarkerMgr(self):
self._showingMarkers = False
mgr = ptMarkerMgr()
mgr.clearSelectedMarker()
mgr.removeAllMarkers()
return mgr
@property
def selected_marker(self):
id = ptMarkerMgr().getSelectedMarker()
if id != -1:
for marker in self._markers:
if marker[0] == id:
return marker
return None
def _get_selected_marker_id(self):
return ptMarkerMgr().getSelectedMarker()
def _set_selected_marker_id(self, value):
ptMarkerMgr().setSelectedMarker(value)
selected_marker_id = property(_get_selected_marker_id, _set_selected_marker_id)
def _get_selected_marker_index(self):
wantID = ptMarkerMgr().getSelectedMarker()
for idx, (id, age, pos, desc) in enumerate(self._markers):
if id == wantID:
return idx
return -1
def _set_selected_marker_index(self, value):
for idx, (id, age, pos, desc) in enumerate(self._markers):
if idx == value:
ptMarkerMgr().setSelectedMarker(id)
return
selected_marker_index = property(_get_selected_marker_index, _set_selected_marker_index)
def _get_selected_marker_name(self):
marker = self.selected_marker
if marker is not None:
return marker[3]
return "?UNKOWN MARKER?"
def _set_selected_marker_name(self, value):
idx = self.selected_marker_index
if idx != -1:
id, age, pos, desc = self._markers[idx]
self._markers[idx] = (id, age, pos, value)
selected_marker_name = property(_get_selected_marker_name, _set_selected_marker_name)
def Stop(self):
self._playing = False
self._ResetMarkerMgr()
| gpl-3.0 | 2,033,298,938,458,134,300 | 32.877005 | 99 | 0.640726 | false | 3.917749 | false | false | false |
plumgrid/plumgrid-nova | tools/xenserver/destroy_cached_images.py | 24 | 1946 | """
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only cleanup unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
from oslo.config import cfg
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import config
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
CONF = cfg.CONF
CONF.register_cli_opts(destroy_opts)
def main():
config.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=CONF.all_cached,
dry_run=CONF.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
| apache-2.0 | 8,075,085,348,729,097,000 | 27.202899 | 78 | 0.623844 | false | 3.678639 | false | false | false |
5225225/rtv | tests/test_terminal.py | 1 | 17690 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import os
import curses
import codecs
import six
import pytest
from rtv.docs import HELP, COMMENT_EDIT_FILE
from rtv.objects import Color
from rtv.exceptions import TemporaryFileError, MailcapEntryNotFound
try:
from unittest import mock
except ImportError:
import mock
def test_terminal_properties(terminal, config):
assert len(terminal.up_arrow) == 2
assert isinstance(terminal.up_arrow[0], six.text_type)
assert len(terminal.down_arrow) == 2
assert isinstance(terminal.down_arrow[0], six.text_type)
assert len(terminal.neutral_arrow) == 2
assert isinstance(terminal.neutral_arrow[0], six.text_type)
assert len(terminal.guilded) == 2
assert isinstance(terminal.guilded[0], six.text_type)
terminal._display = None
with mock.patch.dict('os.environ', {'DISPLAY': ''}):
assert terminal.display is False
terminal._display = None
with mock.patch('rtv.terminal.sys') as sys, \
mock.patch.dict('os.environ', {'DISPLAY': ''}):
sys.platform = 'darwin'
assert terminal.display is False
terminal._display = None
with mock.patch.dict('os.environ', {'DISPLAY': ':0', 'BROWSER': 'w3m'}):
assert terminal.display is False
terminal._display = None
with mock.patch.dict('os.environ', {'DISPLAY': ':0', 'BROWSER': ''}), \
mock.patch('webbrowser._tryorder'):
assert terminal.display is True
assert terminal.get_arrow(None) is not None
assert terminal.get_arrow(True) is not None
assert terminal.get_arrow(False) is not None
assert terminal.config == config
assert terminal.loader is not None
assert terminal.MIN_HEIGHT is not None
assert terminal.MIN_WIDTH is not None
def test_terminal_functions(terminal):
terminal.flash()
assert curses.flash.called
terminal.getch()
assert terminal.stdscr.getch.called
with pytest.raises(RuntimeError):
with terminal.no_delay():
raise RuntimeError()
terminal.stdscr.nodelay.assert_any_call(0)
terminal.stdscr.nodelay.assert_any_call(1)
curses.endwin.reset_mock()
curses.doupdate.reset_mock()
with terminal.suspend():
pass
assert curses.endwin.called
assert curses.doupdate.called
curses.endwin.reset_mock()
curses.doupdate.reset_mock()
with pytest.raises(RuntimeError):
with terminal.suspend():
raise RuntimeError()
assert curses.endwin.called
assert curses.doupdate.called
terminal.addch(terminal.stdscr, 3, 5, 'ch', 'attr')
terminal.stdscr.addch.assert_called_with(3, 5, 'ch', 'attr')
def test_terminal_clean_ascii(terminal):
terminal.config['ascii'] = True
# unicode returns ascii
text = terminal.clean('hello ❤')
assert isinstance(text, six.binary_type)
assert text.decode('ascii') == 'hello ?'
# utf-8 returns ascii
text = terminal.clean('hello ❤'.encode('utf-8'))
assert isinstance(text, six.binary_type)
assert text.decode('ascii') == 'hello ?'
# ascii returns ascii
text = terminal.clean('hello'.encode('ascii'))
assert isinstance(text, six.binary_type)
assert text.decode('ascii') == 'hello'
def test_terminal_clean_unicode(terminal):
terminal.config['ascii'] = False
# unicode returns utf-8
text = terminal.clean('hello ❤')
assert isinstance(text, six.binary_type)
assert text.decode('utf-8') == 'hello ❤'
# utf-8 returns utf-8
text = terminal.clean('hello ❤'.encode('utf-8'))
assert isinstance(text, six.binary_type)
assert text.decode('utf-8') == 'hello ❤'
# ascii returns utf-8
text = terminal.clean('hello'.encode('ascii'))
assert isinstance(text, six.binary_type)
assert text.decode('utf-8') == 'hello'
def test_terminal_clean_ncols(terminal):
text = terminal.clean('hello', n_cols=5)
assert text.decode('utf-8') == 'hello'
text = terminal.clean('hello', n_cols=4)
assert text.decode('utf-8') == 'hell'
text = terminal.clean('hello', n_cols=10)
assert text.decode('utf-8') == 'hello'
text = terminal.clean('hello', n_cols=9)
assert text.decode('utf-8') == 'hell'
@pytest.mark.parametrize('use_ascii', [True, False])
def test_terminal_clean_unescape_html(terminal, use_ascii):
# HTML characters get decoded
terminal.config['ascii'] = use_ascii
text = terminal.clean('<')
assert isinstance(text, six.binary_type)
assert text.decode('ascii' if use_ascii else 'utf-8') == '<'
@pytest.mark.parametrize('use_ascii', [True, False])
def test_terminal_add_line(terminal, stdscr, use_ascii):
terminal.config['ascii'] = use_ascii
terminal.add_line(stdscr, 'hello')
    # Mock has no `called_with` assertion; just verify the text was drawn
    assert stdscr.addstr.called
stdscr.reset_mock()
# Text will be drawn, but cut off to fit on the screen
terminal.add_line(stdscr, 'hello', row=3, col=75)
    assert stdscr.addstr.called  # `called_with` is not a real Mock assertion
stdscr.reset_mock()
# Outside of screen bounds, don't even try to draw the text
terminal.add_line(stdscr, 'hello', col=79)
assert not stdscr.addstr.called
stdscr.reset_mock()
@pytest.mark.parametrize('use_ascii', [True, False])
def test_show_notification(terminal, stdscr, use_ascii):
terminal.config['ascii'] = use_ascii
# Multi-line messages should be automatically split
text = 'line 1\nline 2\nline3'
terminal.show_notification(text)
assert stdscr.subwin.nlines == 5
assert stdscr.subwin.addstr.call_count == 3
stdscr.reset_mock()
# The text should be trimmed to fit 40x80
text = HELP.strip().splitlines()
terminal.show_notification(text)
assert stdscr.subwin.nlines == 40
assert stdscr.subwin.ncols <= 80
assert stdscr.subwin.addstr.call_count == 38
stdscr.reset_mock()
# The text should be trimmed to fit in 20x20
stdscr.nlines, stdscr.ncols = 15, 20
text = HELP.strip().splitlines()
terminal.show_notification(text)
assert stdscr.subwin.nlines == 15
assert stdscr.subwin.ncols == 20
assert stdscr.subwin.addstr.call_count == 13
@pytest.mark.parametrize('use_ascii', [True, False])
def test_text_input(terminal, stdscr, use_ascii):
terminal.config['ascii'] = use_ascii
stdscr.nlines = 1
# Text will be wrong because stdscr.inch() is not implemented
# But we can at least tell if text was captured or not
stdscr.getch.side_effect = [ord('h'), ord('i'), ord('!'), terminal.RETURN]
assert isinstance(terminal.text_input(stdscr), six.text_type)
stdscr.getch.side_effect = [ord('b'), ord('y'), ord('e'), terminal.ESCAPE]
assert terminal.text_input(stdscr) is None
stdscr.getch.side_effect = [ord('h'), curses.KEY_RESIZE, terminal.RETURN]
assert terminal.text_input(stdscr, allow_resize=True) is not None
stdscr.getch.side_effect = [ord('h'), curses.KEY_RESIZE, terminal.RETURN]
assert terminal.text_input(stdscr, allow_resize=False) is None
@pytest.mark.parametrize('use_ascii', [True, False])
def test_prompt_input(terminal, stdscr, use_ascii):
terminal.config['ascii'] = use_ascii
window = stdscr.derwin()
window.getch.side_effect = [ord('h'), ord('i'), terminal.RETURN]
assert isinstance(terminal.prompt_input('hi'), six.text_type)
attr = Color.CYAN | curses.A_BOLD
stdscr.subwin.addstr.assert_called_with(0, 0, 'hi'.encode('ascii'), attr)
assert window.nlines == 1
assert window.ncols == 78
window.getch.side_effect = [ord('b'), ord('y'), ord('e'), terminal.ESCAPE]
assert terminal.prompt_input('hi') is None
stdscr.getch.side_effect = [ord('b'), ord('e'), terminal.RETURN]
assert terminal.prompt_input('hi', key=True) == ord('b')
stdscr.getch.side_effect = [terminal.ESCAPE, ord('e'), ord('l')]
assert terminal.prompt_input('hi', key=True) is None
def test_prompt_y_or_n(terminal, stdscr):
stdscr.getch.side_effect = [ord('y'), ord('N'), terminal.ESCAPE, ord('a')]
attr = Color.CYAN | curses.A_BOLD
text = 'hi'.encode('ascii')
# Press 'y'
assert terminal.prompt_y_or_n('hi')
stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
assert not curses.flash.called
# Press 'N'
assert not terminal.prompt_y_or_n('hi')
stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
assert not curses.flash.called
# Press Esc
assert not terminal.prompt_y_or_n('hi')
stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
assert not curses.flash.called
# Press an invalid key
assert not terminal.prompt_y_or_n('hi')
stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
assert curses.flash.called
@pytest.mark.parametrize('use_ascii', [True, False])
def test_open_editor(terminal, use_ascii):
terminal.config['ascii'] = use_ascii
comment = COMMENT_EDIT_FILE.format(content='#| This is a comment! ❤')
data = {'filename': None}
def side_effect(args):
data['filename'] = args[1]
with codecs.open(data['filename'], 'r+', 'utf-8') as fp:
assert fp.read() == comment
fp.write('This is an amended comment! ❤')
return mock.Mock()
with mock.patch('subprocess.Popen', autospec=True) as Popen:
Popen.side_effect = side_effect
with terminal.open_editor(comment) as reply_text:
assert reply_text == 'This is an amended comment! ❤'
assert os.path.isfile(data['filename'])
assert curses.endwin.called
assert curses.doupdate.called
assert not os.path.isfile(data['filename'])
def test_open_editor_error(terminal):
with mock.patch('subprocess.Popen', autospec=True) as Popen, \
mock.patch.object(terminal, 'show_notification'):
# Invalid editor
Popen.side_effect = OSError
with terminal.open_editor('hello') as text:
assert text == 'hello'
assert 'Could not open' in terminal.show_notification.call_args[0][0]
data = {'filename': None}
def side_effect(args):
data['filename'] = args[1]
return mock.Mock()
# Temporary File Errors don't delete the file
Popen.side_effect = side_effect
with terminal.open_editor('test'):
assert os.path.isfile(data['filename'])
raise TemporaryFileError()
assert os.path.isfile(data['filename'])
os.remove(data['filename'])
# Other Exceptions don't delete the file *and* are propagated
Popen.side_effect = side_effect
with pytest.raises(ValueError):
with terminal.open_editor('test'):
assert os.path.isfile(data['filename'])
raise ValueError()
assert os.path.isfile(data['filename'])
os.remove(data['filename'])
# Gracefully handle the case when we can't remove the file
with mock.patch.object(os, 'remove'):
os.remove.side_effect = OSError
with terminal.open_editor():
pass
assert os.remove.called
assert os.path.isfile(data['filename'])
os.remove(data['filename'])
def test_open_link_mailcap(terminal):
url = 'http://www.test.com'
class MockMimeParser(object):
pattern = re.compile('')
mock_mime_parser = MockMimeParser()
with mock.patch.object(terminal, 'open_browser'), \
mock.patch('rtv.terminal.mime_parsers') as mime_parsers:
mime_parsers.parsers = [mock_mime_parser]
# Pass through to open_browser if media is disabled
terminal.config['enable_media'] = False
terminal.open_link(url)
assert terminal.open_browser.called
terminal.open_browser.reset_mock()
# Invalid content type
terminal.config['enable_media'] = True
mock_mime_parser.get_mimetype = lambda url: (url, None)
terminal.open_link(url)
assert terminal.open_browser.called
terminal.open_browser.reset_mock()
# Text/html defers to open_browser
mock_mime_parser.get_mimetype = lambda url: (url, 'text/html')
terminal.open_link(url)
assert terminal.open_browser.called
terminal.open_browser.reset_mock()
def test_open_link_subprocess(terminal):
url = 'http://www.test.com'
terminal.config['enable_media'] = True
with mock.patch('time.sleep'), \
mock.patch('os.system'), \
mock.patch('subprocess.Popen') as Popen, \
mock.patch('six.moves.input') as six_input, \
mock.patch.object(terminal, 'get_mailcap_entry'):
        six_input.return_value = 'y'
def reset_mock():
six_input.reset_mock()
os.system.reset_mock()
terminal.stdscr.subwin.addstr.reset_mock()
Popen.return_value.communicate.return_value = '', 'stderr message'
Popen.return_value.poll.return_value = 0
Popen.return_value.wait.return_value = 0
def get_error():
# Check if an error message was printed to the terminal
status = 'Program exited with status'.encode('utf-8')
return any(status in args[0][2] for args in
terminal.stdscr.subwin.addstr.call_args_list)
# Non-blocking success
reset_mock()
entry = ('echo ""', 'echo %s')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert not six_input.called
assert not get_error()
# Non-blocking failure
reset_mock()
Popen.return_value.poll.return_value = 127
Popen.return_value.wait.return_value = 127
entry = ('fake .', 'fake %s')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert not six_input.called
assert get_error()
# needsterminal success
reset_mock()
entry = ('echo ""', 'echo %s; needsterminal')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert not six_input.called
assert not get_error()
# needsterminal failure
reset_mock()
Popen.return_value.poll.return_value = 127
Popen.return_value.wait.return_value = 127
entry = ('fake .', 'fake %s; needsterminal')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert not six_input.called
assert get_error()
# copiousoutput success
reset_mock()
entry = ('echo ""', 'echo %s; needsterminal; copiousoutput')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert six_input.called
assert not get_error()
# copiousoutput failure
reset_mock()
Popen.return_value.poll.return_value = 127
Popen.return_value.wait.return_value = 127
entry = ('fake .', 'fake %s; needsterminal; copiousoutput')
terminal.get_mailcap_entry.return_value = entry
terminal.open_link(url)
assert six_input.called
assert get_error()
def test_open_browser(terminal):
url = 'http://www.test.com'
terminal._display = True
with mock.patch('subprocess.Popen', autospec=True) as Popen:
Popen.return_value.poll.return_value = 0
terminal.open_browser(url)
assert Popen.called
assert not curses.endwin.called
assert not curses.doupdate.called
terminal._display = False
with mock.patch('webbrowser.open_new_tab', autospec=True) as open_new_tab:
terminal.open_browser(url)
open_new_tab.assert_called_with(url)
assert curses.endwin.called
assert curses.doupdate.called
def test_open_pager(terminal, stdscr):
data = "Hello World! ❤"
def side_effect(args, stdin=None):
assert stdin is not None
raise OSError
with mock.patch('subprocess.Popen', autospec=True) as Popen, \
mock.patch.dict('os.environ', {'PAGER': 'fake'}):
Popen.return_value.stdin = mock.Mock()
terminal.open_pager(data)
assert Popen.called
assert not stdscr.addstr.called
# Raise an OS error
Popen.side_effect = side_effect
terminal.open_pager(data)
        # `called_with` is not a real Mock assertion; just check that the
        # 'Could not open pager fake' error was drawn at all.
        assert stdscr.addstr.called
def test_open_urlview(terminal, stdscr):
data = "Hello World! ❤"
def side_effect(args, stdin=None):
assert stdin is not None
raise OSError
with mock.patch('subprocess.Popen') as Popen, \
mock.patch.dict('os.environ', {'RTV_URLVIEWER': 'fake'}):
Popen.return_value.poll.return_value = 0
terminal.open_urlview(data)
assert Popen.called
assert not stdscr.addstr.called
Popen.return_value.poll.return_value = 1
terminal.open_urlview(data)
assert stdscr.subwin.addstr.called
# Raise an OS error
Popen.side_effect = side_effect
terminal.open_urlview(data)
        # `called_with` is not a real Mock assertion; just check that the
        # 'Failed to open fake' error was drawn at all.
        assert stdscr.addstr.called
def test_strip_textpad(terminal):
assert terminal.strip_textpad(None) is None
assert terminal.strip_textpad(' foo ') == ' foo'
text = 'alpha bravo\ncharlie \ndelta \n echo \n\nfoxtrot\n\n\n'
assert terminal.strip_textpad(text) == (
'alpha bravocharlie delta\n echo\n\nfoxtrot')
| mit | 6,218,785,795,478,843,000 | 31.408088 | 78 | 0.639421 | false | 3.582605 | true | false | false |
Ultimaker/Cura | plugins/DigitalLibrary/src/DigitalFactoryProjectModel.py | 1 | 2503 | # Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import List, Optional
from PyQt5.QtCore import Qt, pyqtSignal
from UM.Logger import Logger
from UM.Qt.ListModel import ListModel
from .DigitalFactoryProjectResponse import DigitalFactoryProjectResponse
PROJECT_UPDATED_AT_DATETIME_FORMAT = "%d-%m-%Y"
class DigitalFactoryProjectModel(ListModel):
DisplayNameRole = Qt.UserRole + 1
LibraryProjectIdRole = Qt.UserRole + 2
DescriptionRole = Qt.UserRole + 3
ThumbnailUrlRole = Qt.UserRole + 5
UsernameRole = Qt.UserRole + 6
LastUpdatedRole = Qt.UserRole + 7
dfProjectModelChanged = pyqtSignal()
def __init__(self, parent = None) -> None:
super().__init__(parent)
self.addRoleName(self.DisplayNameRole, "displayName")
self.addRoleName(self.LibraryProjectIdRole, "libraryProjectId")
self.addRoleName(self.DescriptionRole, "description")
self.addRoleName(self.ThumbnailUrlRole, "thumbnailUrl")
self.addRoleName(self.UsernameRole, "username")
self.addRoleName(self.LastUpdatedRole, "lastUpdated")
self._projects = [] # type: List[DigitalFactoryProjectResponse]
def setProjects(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
if self._projects == df_projects:
return
self._items.clear()
self._projects = df_projects
# self.sortProjectsBy("display_name")
self._update(df_projects)
def extendProjects(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
if not df_projects:
return
self._projects.extend(df_projects)
# self.sortProjectsBy("display_name")
self._update(df_projects)
def clearProjects(self) -> None:
self.clear()
self._projects.clear()
self.dfProjectModelChanged.emit()
def _update(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
for project in df_projects:
self.appendItem({
"displayName" : project.display_name,
"libraryProjectId" : project.library_project_id,
"description": project.description,
"thumbnailUrl": project.thumbnail_url,
"username": project.username,
"lastUpdated": project.last_updated.strftime(PROJECT_UPDATED_AT_DATETIME_FORMAT) if project.last_updated else "",
})
self.dfProjectModelChanged.emit()
| lgpl-3.0 | -117,025,161,842,275,890 | 38.109375 | 129 | 0.669596 | false | 4.011218 | false | false | false |
votervoice/openstates | openstates/ok/events.py | 1 | 1610 | import re
import datetime
import time
import pytz
import lxml.html
from pupa.scrape import Scraper, Event
class OKEventScraper(Scraper):
_tz = pytz.timezone('CST6CDT')
def scrape(self, chamber=None):
chambers = [chamber] if chamber is not None else ['upper']
for chamber in chambers:
yield from self.scrape_upper()
def scrape_upper(self):
url = "http://www.oksenate.gov/Committees/meetingnotices.htm"
page = lxml.html.fromstring(self.get(url).text)
page.make_links_absolute(url)
text = page.text_content()
_, text = text.split('MEETING NOTICES')
re_date = r'[A-Z][a-z]+,\s+[A-Z][a-z]+ \d+, \d{4}'
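        # re_date matches headings like "Wednesday, February 7, 2018"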
chunks = zip(re.finditer(re_date, text), re.split(re_date, text)[1:])
for match, data in chunks:
when = match.group()
when = datetime.datetime.strptime(when, "%A, %B %d, %Y")
            # filter() returns a lazy iterator on Python 3, so materialize it
            # before indexing lines[0] below
            lines = [x.strip() for x in data.splitlines() if x.strip()]
time_ = re.search(r'^\s*TIME:\s+(.+?)\s+\x96', data, re.M).group(1)
time_ = time_.replace('a.m.', 'AM').replace('p.m.', 'PM')
time_ = time.strptime(time_, '%I:%M %p')
when += datetime.timedelta(hours=time_.tm_hour, minutes=time_.tm_min)
title = lines[0]
where = re.search(r'^\s*PLACE:\s+(.+)', data, re.M).group(1)
where = where.strip()
event = Event(name=title,
start_date=self._tz.localize(when),
location_name=where)
event.add_source(url)
yield event
| gpl-3.0 | 6,088,113,025,590,962,000 | 33.255319 | 81 | 0.549689 | false | 3.252525 | false | false | false |
lilchurro/vent | vent/helpers/meta.py | 1 | 21703 | import datetime
import docker
import json
import math
import multiprocessing
import os
import pkg_resources
import platform
import re
import requests
from subprocess import check_output, Popen, PIPE
from vent.api.templates import Template
from vent.helpers.paths import PathDirs
from vent.helpers.logs import Logger
logger = Logger(__name__)
def Version():
""" Get Vent version """
version = ''
try:
version = pkg_resources.require("vent")[0].version
if not version.startswith('v'):
version = 'v' + version
except Exception as e: # pragma: no cover
version = "Error: " + str(e)
return version
def System():
""" Get system operating system """
return platform.system()
def Docker():
""" Get Docker setup information """
docker_info = {'server': {}, 'env': '', 'type': '', 'os': ''}
# get docker server version
try:
d_client = docker.from_env()
docker_info['server'] = d_client.version()
except Exception as e: # pragma: no cover
logger.error("Can't get docker info " + str(e))
# get operating system
system = System()
docker_info['os'] = system
# check if native or using docker-machine
if 'DOCKER_MACHINE_NAME' in os.environ:
# using docker-machine
docker_info['env'] = os.environ['DOCKER_MACHINE_NAME']
docker_info['type'] = 'docker-machine'
elif 'DOCKER_HOST' in os.environ:
# not native
docker_info['env'] = os.environ['DOCKER_HOST']
docker_info['type'] = 'remote'
else:
# using "local" server
docker_info['type'] = 'native'
return docker_info
def Containers(vent=True, running=True):
"""
Get containers that are created, by default limit to vent containers that
are running
"""
containers = []
try:
d_client = docker.from_env()
if vent:
c = d_client.containers.list(all=not running,
filters={'label': 'vent'})
else:
c = d_client.containers.list(all=not running)
for container in c:
containers.append((container.name, container.status))
except Exception as e: # pragma: no cover
logger.error("Docker problem " + str(e))
return containers
def Cpu():
""" Get number of available CPUs """
cpu = "Unknown"
try:
cpu = str(multiprocessing.cpu_count())
except Exception as e: # pragma: no cover
logger.error("Can't access CPU count' " + str(e))
return cpu
def Gpu(pull=False):
""" Check for support of GPUs, and return what's available """
gpu = (False, "")
try:
image = 'nvidia/cuda:8.0-runtime'
image_name, tag = image.split(":")
d_client = docker.from_env()
nvidia_image = d_client.images.list(name=image)
if pull and len(nvidia_image) == 0:
try:
d_client.images.pull(image_name, tag=tag)
nvidia_image = d_client.images.list(name=image)
except Exception as e: # pragma: no cover
logger.error("Something with the GPU went wrong " + str(e))
if len(nvidia_image) > 0:
cmd = 'nvidia-docker run --rm ' + image + ' nvidia-smi -L'
proc = Popen([cmd],
stdout=PIPE,
stderr=PIPE,
shell=True,
close_fds=True)
gpus = proc.stdout.read()
err = proc.stderr.read()
if gpus:
gpu_str = ""
for line in gpus.strip().split("\n"):
gpu_str += line.split(" (UUID: ")[0] + ", "
gpu = (True, gpu_str[:-2])
else:
if err:
gpu = (False, "Unknown", str(err))
else:
gpu = (False, "None")
else:
gpu = (False, "None")
except Exception as e: # pragma: no cover
gpu = (False, "Unknown", str(e))
return gpu
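# Gpu() returns e.g. (True, "GPU 0: GeForce GTX 1080"): the ``nvidia-smi -L``
# lines with their trailing " (UUID: ...)" suffix stripped.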
def GpuUsage(**kargs):
""" Get the current GPU usage of available GPUs """
usage = (False, None)
gpu_status = {'vent_usage': {'dedicated': [], 'mem_mb': {}}}
path_dirs = PathDirs(**kargs)
path_dirs.host_config()
template = Template(template=path_dirs.cfg_file)
# get running jobs using gpus
try:
d_client = docker.from_env()
c = d_client.containers.list(all=False,
filters={'label': 'vent-plugin'})
for container in c:
if ('vent.gpu' in container.attrs['Config']['Labels'] and
container.attrs['Config']['Labels']['vent.gpu'] == 'yes'):
device = container.attrs['Config']['Labels']['vent.gpu.device']
if ('vent.gpu.dedicated' in container.attrs['Config']['Labels'] and
container.attrs['Config']['Labels']['vent.gpu.dedicated'] == 'yes'):
gpu_status['vent_usage']['dedicated'].append(device)
elif 'vent.gpu.mem_mb' in container.attrs['Config']['Labels']:
if device not in gpu_status['vent_usage']['mem_mb']:
gpu_status['vent_usage']['mem_mb'][device] = 0
gpu_status['vent_usage']['mem_mb'][device] += int(container.attrs['Config']['Labels']['vent.gpu.mem_mb'])
except Exception as e: # pragma: no cover
logger.error("Could not get running jobs " + str(e))
port = '3476'
# default docker gateway
host = '172.17.0.1'
result = template.option('nvidia-docker-plugin', 'port')
if result[0]:
port = result[1]
result = template.option('nvidia-docker-plugin', 'host')
if result[0]:
host = result[1]
else:
try:
# now just requires ip, ifconfig
route = check_output(('ip', 'route')).split('\n')
default = ''
# grab the default network device.
for device in route:
if 'default' in device:
default = device.split()[4]
break
# grab the IP address for the default device
ip_addr = check_output(('ifconfig', default))
ip_addr = ip_addr.split('\n')[1].split()[1]
host = ip_addr
except Exception as e: # pragma: no cover
logger.error("Something with the ip addresses"
"went wrong " + str(e))
# have to get the info separately to determine how much memory is availabe
nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/info/json'
try:
r = requests.get(nd_url)
if r.status_code == 200:
status = r.json()
for i, device in enumerate(status['Devices']):
gm = int(round(math.log(int(device['Memory']['Global']), 2)))
gpu_status[i] = {'global_memory': 2**gm,
'cores': device['Cores']}
else:
usage = (False, "Unable to get GPU usage request error code: " +
str(r.status_code))
except Exception as e: # pragma: no cover
usage = (False, "Error: " + str(e))
# get actual status of each gpu
nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/status/json'
try:
r = requests.get(nd_url)
if r.status_code == 200:
status = r.json()
for i, device in enumerate(status['Devices']):
if i not in gpu_status:
gpu_status[i] = {}
gpu_status[i]['utilization'] = device['Utilization']
gpu_status[i]['memory'] = device['Memory']
gpu_status[i]['processes'] = device['Processes']
usage = (True, gpu_status)
else:
usage = (False, "Unable to get GPU usage request error code: " +
str(r.status_code))
except Exception as e: # pragma: no cover
usage = (False, "Error: " + str(e))
return usage
def Images(vent=True):
""" Get images that are build, by default limit to vent images """
images = []
# TODO needs to also check images in the manifest that couldn't have the
# label added
try:
d_client = docker.from_env()
if vent:
i = d_client.images.list(filters={'label': 'vent'})
else:
i = d_client.images.list()
for image in i:
images.append((image.tags[0], image.short_id))
except Exception as e: # pragma: no cover
logger.error("Something with the Images went wrong " + str(e))
return images
def Jobs():
"""
Get the number of jobs that are running and finished, and the number of
total tools running and finished for those jobs
"""
jobs = [0, 0, 0, 0]
# get running jobs
try:
d_client = docker.from_env()
c = d_client.containers.list(all=False,
filters={'label': 'vent-plugin'})
files = []
for container in c:
jobs[1] += 1
if 'file' in container.attrs['Config']['Labels']:
if container.attrs['Config']['Labels']['file'] not in files:
files.append(container.attrs['Config']['Labels']['file'])
jobs[0] = len(files)
except Exception as e: # pragma: no cover
logger.error("Could not get running jobs " + str(e))
# get finished jobs
try:
d_client = docker.from_env()
c = d_client.containers.list(all=True,
filters={'label': 'vent-plugin',
'status': 'exited'})
file_names = []
tool_names = []
finished_jobs = []
path_dirs = PathDirs()
manifest = os.path.join(path_dirs.meta_dir, "status.json")
if os.path.exists(manifest):
file_status = 'a'
else:
file_status = 'w'
# get a list of past jobs' file names if status.json exists
if file_status == 'a':
with open(manifest, 'r') as infile:
for line in infile:
finished_jobs.append(json.loads(line))
# get a list of file names so we can check against each container
file_names = [d['FileName'] for d in finished_jobs]
# multiple tools can run on 1 file. Use a tuple to status check
tool_names = [(d['FileName'], d['VentPlugin'])
for d in finished_jobs]
for container in c:
jobs[3] += 1
if 'file' in container.attrs['Config']['Labels']:
# make sure the file name and the tool tup exists because
# multiple tools can run on 1 file.
if (container.attrs['Config']['Labels']['file'],
container.attrs['Config']['Labels']['vent.name']) not in \
tool_names:
# TODO figure out a nicer way of getting desired values
# from containers.attrs.
new_file = {}
new_file['FileName'] = \
container.attrs['Config']['Labels']['file']
new_file['VentPlugin'] = \
container.attrs['Config']['Labels']['vent.name']
new_file['StartedAt'] = \
container.attrs['State']['StartedAt']
new_file['FinishedAt'] = \
container.attrs['State']['FinishedAt']
new_file['ID'] = \
container.attrs['Id'][:12]
# create/append a json file with all wanted information
with open(manifest, file_status) as outfile:
json.dump(new_file, outfile)
outfile.write("\n")
# delete any containers with 'vent-plugin' in the groups
if 'vent-plugin' in container.attrs['Config']['Labels']:
container.remove()
# add extra one to account for file that just finished if the file was
# just created since file_names is processed near the beginning
if file_status == 'w' and len(file_names) == 1:
jobs[2] = len(set(file_names)) + 1
else:
jobs[2] = len(set(file_names))
jobs[3] = jobs[3] - jobs[1]
except Exception as e: # pragma: no cover
logger.error("Could not get finished jobs " + str(e))
return tuple(jobs)
def Tools(**kargs):
""" Get tools that exist in the manifest """
path_dirs = PathDirs(**kargs)
manifest = os.path.join(path_dirs.meta_dir, "plugin_manifest.cfg")
template = Template(template=manifest)
tools = template.sections()
return tools[1]
def Services(core, vent=True, external=False, **kargs):
"""
Get services that have exposed ports, expects param core to be True or
False based on which type of services to return, by default limit to vent
containers and processes not running externally, if not limited by vent
containers, then core is ignored.
"""
services = []
path_dirs = PathDirs(**kargs)
template = Template(template=path_dirs.cfg_file)
services_uri = template.option("main", "services_uri")
try:
# look for internal services
if not external:
d_client = docker.from_env()
if vent:
c_filter = {'label': 'vent'}
containers = d_client.containers.list(filters=c_filter)
else:
containers = d_client.containers.list()
for c in containers:
uris = {}
name = None
if vent and 'vent.name' in c.attrs['Config']['Labels']:
if ((core and
'vent.groups' in c.attrs['Config']['Labels'] and
'core' in c.attrs['Config']['Labels']['vent.groups']) or
(not core and
'vent.groups' in c.attrs['Config']['Labels'] and
'core' not in c.attrs['Config']['Labels']['vent.groups'])):
name = c.attrs['Config']['Labels']['vent.name']
if name == '':
name = c.attrs['Config']['Labels']['vent.namespace'].split('/')[1]
for label in c.attrs['Config']['Labels']:
if label.startswith('uri'):
try:
val = int(label[-1])
if val not in uris:
uris[val] = {}
uris[val][label[:-1]] = c.attrs['Config']['Labels'][label]
except Exception as e: # pragma: no cover
logger.error("Malformed services section"
" in the template file "
+ str(e))
else:
name = c.name
if name and 'vent.repo' in c.attrs['Config']['Labels']:
name = c.attrs['Config']['Labels']['vent.repo'].split("/")[-1] + ": " + name
ports = c.attrs['NetworkSettings']['Ports']
p = []
port_num = 1
for port in ports:
if ports[port]:
try:
service_str = ''
if 'uri_prefix' in uris[port_num]:
service_str += uris[port_num]['uri_prefix']
host = ports[port][0]['HostIp']
if services_uri[0] and host == '0.0.0.0':
host = services_uri[1]
service_str += host + ":"
service_str += ports[port][0]['HostPort']
if 'uri_postfix' in uris[port_num]:
service_str += uris[port_num]['uri_postfix']
uri_creds = ''
if 'uri_user' in uris[port_num]:
uri_creds += " user:"
uri_creds += uris[port_num]['uri_user']
if 'uri_pw' in uris[port_num]:
uri_creds += " pw:"
uri_creds += uris[port_num]['uri_pw']
if uri_creds:
service_str += " - (" + uri_creds + " )"
p.append(service_str)
except Exception as e: # pragma: no cover
logger.info("No services defined for " + str(name) + " with exposed port " +
str(port_num) + " because: " + str(e))
port_num += 1
if p and name:
services.append((name, p))
logger.info(services)
# look for external services
else:
ext_tools = template.section('external-services')[1]
for ext_tool in ext_tools:
try:
name = ext_tool[0].lower()
p = []
settings_dict = json.loads(ext_tool[1])
if ('locally_active' in settings_dict and
settings_dict['locally_active'] == 'no'):
# default protocol to display will be http
protocol = 'http'
ip_address = ''
port = ''
for setting in settings_dict:
if setting == 'ip_address':
ip_address = settings_dict[setting]
if setting == 'port':
port = settings_dict[setting]
if setting == 'protocol':
protocol = settings_dict[setting]
p.append(protocol + '://' + ip_address + ':' + port)
if p and name:
services.append((name, p))
except Exception: # pragma: no cover
p = None
except Exception as e: # pragma: no cover
logger.error("Could not get services " + str(e))
return services
def Timestamp():
""" Get the current datetime in UTC """
timestamp = ""
try:
timestamp = str(datetime.datetime.now())+" UTC"
except Exception as e: # pragma: no cover
logger.error("Could not get current time " + str(e))
return timestamp
def Uptime():
""" Get the current uptime information """
uptime = ""
try:
uptime = str(check_output(["uptime"], close_fds=True))[1:]
except Exception as e: # pragma: no cover
logger.error("Could not get current uptime " + str(e))
return uptime
def DropLocation():
""" Get the directory that file drop is watching """
template = Template(template=PathDirs().cfg_file)
drop_loc = template.option("main", "files")[1]
drop_loc = os.path.expanduser(drop_loc)
drop_loc = os.path.abspath(drop_loc)
return (True, drop_loc)
def ParsedSections(file_val):
"""
Get the sections and options of a file returned as a dictionary
"""
try:
template_dict = {}
cur_section = ''
for val in file_val.split("\n"):
val = val.strip()
if val != '':
section_match = re.match(r"\[.+\]", val)
if section_match:
cur_section = section_match.group()[1:-1]
template_dict[cur_section] = {}
else:
option, value = val.split('=', 1)
option = option.strip()
value = value.strip()
if option.startswith('#'):
template_dict[cur_section][val] = ''
else:
template_dict[cur_section][option] = value
except Exception: # pragma: no cover
template_dict = {}
return template_dict
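# Example (illustrative):
#   ParsedSections("[main]\nfiles = /opt/vent_files")
#   == {'main': {'files': '/opt/vent_files'}}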
def Dependencies(tools):
"""
Takes in a list of tools that are being updated and returns any tools that
depend on linking to them
"""
dependencies = []
if tools:
path_dirs = PathDirs()
man = Template(os.path.join(path_dirs.meta_dir, 'plugin_manifest.cfg'))
for section in man.sections()[1]:
# don't worry about dealing with tool if it's not running
running = man.option(section, 'running')
if not running[0] or running[1] != 'yes':
continue
t_name = man.option(section, 'name')[1]
t_branch = man.option(section, 'branch')[1]
t_version = man.option(section, 'version')[1]
t_identifier = {'name': t_name,
'branch': t_branch,
'version': t_version}
options = man.options(section)[1]
if 'docker' in options:
d_settings = json.loads(man.option(section,
'docker')[1])
if 'links' in d_settings:
for link in json.loads(d_settings['links']):
if link in tools:
dependencies.append(t_identifier)
return dependencies
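# Example (illustrative): if 'rabbitmq' is in ``tools`` and a running tool
# lists it under its docker 'links', that tool's {name, branch, version}
# identifier is returned so the caller can restart it.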
| apache-2.0 | -225,670,770,956,904,030 | 37.617438 | 125 | 0.492236 | false | 4.37737 | true | false | false |
kurtraschke/camelot | camelot/__init__.py | 1 | 1312 | # ============================================================================
#
# Copyright (C) 2007-2010 Conceptive Engineering bvba. All rights reserved.
# www.conceptive.be / [email protected]
#
# This file is part of the Camelot Library.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file license.txt included in the packaging of
# this file. Please review this information to ensure GNU
# General Public Licensing requirements will be met.
#
# If you are unsure which license is appropriate for your use, please
# visit www.python-camelot.com or contact [email protected]
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# For use of this library in commercial applications, please contact
# [email protected]
#
# ============================================================================
"""Camelot is a python GUI framework on top of Elixir / Sqlalchemy inspired by
the Django admin interface. Start building applications at warp speed, simply
by adding some additional information to you Elixir model."""
__version__ = 'trunk'
| gpl-2.0 | 3,956,184,204,709,217,000 | 42.733333 | 79 | 0.674543 | false | 4.087227 | false | false | false |
317070/kaggle-heart | configurations/je_test.py | 1 | 9741 | from deep_learning_layers import ConvolutionOver2DAxisLayer, MaxPoolOverAxisLayer, MaxPoolOver2DAxisLayer, \
MaxPoolOver3DAxisLayer, ConvolutionOver3DAxisLayer, ConvolutionOverAxisLayer
from default import *
import functools
import theano.tensor as T
from layers import MuLogSigmaErfLayer, CumSumLayer
import layers
import objectives
from lasagne.layers import InputLayer, reshape, DenseLayer, DenseLayer, batch_norm
from postprocess import upsample_segmentation
from volume_estimation_layers import GaussianApproximationVolumeLayer
import theano_printer
from updates import build_adam_updates
import image_transform
caching = None
validate_every = 10
validate_train_set = False
save_every = 10
restart_from_save = False
batches_per_chunk = 2
batch_size = 8
sunny_batch_size = 4
num_epochs_train = 60
image_size = 128
learning_rate_schedule = {
0: 0.1,
2: 0.01,
10: 0.001,
50: 0.0001,
60: 0.00001,
}
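# Assumed semantics: epoch number -> learning rate, i.e. the rate drops to
# 0.001 from epoch 10 onwards and to 1e-5 at epoch 60.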
from postprocess import postprocess_onehot, postprocess
from preprocess import preprocess, preprocess_with_augmentation, set_upside_up, normalize_contrast, preprocess_normscale, normalize_contrast_zmuv
use_hough_roi = True
preprocess_train = functools.partial( # normscale_resize_and_augment has a bug
preprocess_normscale,
normscale_resize_and_augment_function=partial(
image_transform.normscale_resize_and_augment_2,
        normalised_patch_size=(80, 80)))
#preprocess_train = preprocess_normscale
preprocess_validation = preprocess # no augmentation
preprocess_test = preprocess_with_augmentation # no augmentation
test_time_augmentations = 10
augmentation_params = {
"rotate": (0, 0),
"shear": (0, 0),
"translate_x": (0, 0),
"translate_y": (0, 0),
"flip_vert": (0, 0),
"zoom_x": (.75, 1.25),
"zoom_y": (.75, 1.25),
"change_brightness": (-0.3, 0.3),
}
cleaning_processes = [
set_upside_up,]
cleaning_processes_post = [
partial(normalize_contrast_zmuv, z=2)]
build_updates = build_adam_updates
postprocess = postprocess
nr_slices = 20
data_sizes = {
"sliced:data:randomslices": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:sax:locations": (batch_size, nr_slices),
"sliced:data:sax:is_not_padded": (batch_size, nr_slices),
"sliced:data:sax": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 20 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax:noswitch": (batch_size, 15, 30, image_size, image_size), # 30 time steps, 20 mri_slices, 100 px wide, 100 px high,
"area_per_pixel:sax": (batch_size, ),
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:middle": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
check_inputs = False
def build_model():
#################
# Regular model #
#################
input_key = "sliced:data:singleslice:middle"
data_size = data_sizes[input_key]
l0 = InputLayer(data_size)
l0r = batch_norm(reshape(l0, (-1, 1, ) + data_size[1:]))
# (batch, channel, axis, time, x, y)
# convolve over time
l1 = batch_norm(ConvolutionOverAxisLayer(l0r, num_filters=8, filter_size=(3,), axis=(3,), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.0),
))
l1m = batch_norm(MaxPoolOverAxisLayer(l1, pool_size=(4,), axis=(3,)))
# convolve over x and y
l2a = batch_norm(ConvolutionOver2DAxisLayer(l1m, num_filters=8, filter_size=(3, 3),
axis=(4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.0),
))
l2b = batch_norm(ConvolutionOver2DAxisLayer(l2a, num_filters=8, filter_size=(3, 3),
axis=(4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.0),
))
l2m = batch_norm(MaxPoolOver2DAxisLayer(l2b, pool_size=(2, 2), axis=(4,5)))
# convolve over x, y, time
l3a = batch_norm(ConvolutionOver3DAxisLayer(l2m, num_filters=32, filter_size=(3, 3, 3),
axis=(3,4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
))
l3b = batch_norm(ConvolutionOver2DAxisLayer(l3a, num_filters=32, filter_size=(3, 3),
axis=(4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
))
l3m = batch_norm(MaxPoolOver2DAxisLayer(l3b, pool_size=(2, 2), axis=(4,5)))
# convolve over time
l4 = batch_norm(ConvolutionOverAxisLayer(l3m, num_filters=32, filter_size=(3,), axis=(3,), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
))
l4m = batch_norm(MaxPoolOverAxisLayer(l4, pool_size=(2,), axis=(2,)))
# maxpool over axis
l5 = batch_norm(MaxPoolOverAxisLayer(l3m, pool_size=(4,), axis=(2,)))
# convolve over x and y
l6a = batch_norm(ConvolutionOver2DAxisLayer(l5, num_filters=128, filter_size=(3, 3),
axis=(4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
))
l6b = batch_norm(ConvolutionOver2DAxisLayer(l6a, num_filters=128, filter_size=(3, 3),
axis=(4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
))
l6m = batch_norm(MaxPoolOver2DAxisLayer(l6b, pool_size=(2, 2), axis=(4,5)))
# convolve over time and x,y, is sparse reduction layer
l7 = ConvolutionOver3DAxisLayer(l6m, num_filters=32, filter_size=(3,3,3), axis=(3,4,5), channel=1,
W=lasagne.init.Orthogonal(),
b=lasagne.init.Constant(0.1),
)
key_scale = "area_per_pixel:sax"
l_scale = InputLayer(data_sizes[key_scale])
# Systole Dense layers
ldsys1 = lasagne.layers.DenseLayer(l7, num_units=512,
W=lasagne.init.Orthogonal("relu"),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.rectify)
ldsys1drop = lasagne.layers.dropout(ldsys1, p=0.5)
ldsys2 = lasagne.layers.DenseLayer(ldsys1drop, num_units=128,
W=lasagne.init.Orthogonal("relu"),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.rectify)
ldsys2drop = lasagne.layers.dropout(ldsys2, p=0.5)
ldsys3 = lasagne.layers.DenseLayer(ldsys2drop, num_units=1,
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity)
l_systole = layers.MuConstantSigmaErfLayer(layers.ScaleLayer(ldsys3, scale=l_scale), sigma=0.0)
# Diastole Dense layers
lddia1 = lasagne.layers.DenseLayer(l7, num_units=512,
W=lasagne.init.Orthogonal("relu"),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.rectify)
lddia1drop = lasagne.layers.dropout(lddia1, p=0.5)
lddia2 = lasagne.layers.DenseLayer(lddia1drop, num_units=128,
W=lasagne.init.Orthogonal("relu"),
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.rectify)
lddia2drop = lasagne.layers.dropout(lddia2, p=0.5)
lddia3 = lasagne.layers.DenseLayer(lddia2drop, num_units=1,
b=lasagne.init.Constant(0.1),
nonlinearity=lasagne.nonlinearities.identity)
l_diastole = layers.MuConstantSigmaErfLayer(layers.ScaleLayer(lddia3, scale=l_scale), sigma=0.0)
return {
"inputs":{
input_key: l0,
key_scale: l_scale,
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
},
"regularizable": {
ldsys1: l2_weight,
ldsys2: l2_weight,
ldsys3: l2_weight,
lddia1: l2_weight,
lddia2: l2_weight,
lddia3: l2_weight,
},
}
l2_weight = 0.0005
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = lasagne.regularization.regularize_layer_params_weighted(interface_layers["regularizable"], lasagne.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
| mit | 2,966,186,508,157,458,400 | 40.987069 | 145 | 0.561955 | false | 3.50018 | false | false | false |
wuzheng-sjtu/FastFPN | libs/datasets/city.py | 1 | 5856 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.python.lib.io.tf_record import TFRecordCompressionType
_FILE_PATTERN = 'city_%s_*.tfrecord'
SPLITS_TO_SIZES = {'train': 2975, 'val': 500}
_NUM_CLASSES = 11
_ITEMS_TO_DESCRIPTIONS = {
'image': 'A color image of varying size.',
#'label': 'An annotation image of varying size. (pixel-level masks)',
#'gt_masks': 'masks of instances in this image. (instance-level masks), of shape (N, image_height, image_width)',
'gt_boxes': 'bounding boxes and classes of instances in this image, of shape (N, 5), each entry is (x1, y1, x2, y2)',
}
#map from the class name to id, should be put into a City class
def cls2id(cls):
clsdict = {'person':1, 'rider':2, 'car':3,'truck':4,'bus':5,'caravan':6, \
'trailer':7,'train':8,'motorcycle':9,'bicycle':10}
return clsdict[cls]
def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
if split_name not in SPLITS_TO_SIZES:
raise ValueError('split name %s was not recognized.' % split_name)
if not file_pattern:
file_pattern = _FILE_PATTERN
file_pattern = os.path.join(dataset_dir, 'records', file_pattern % split_name)
# Allowing None in the signature so that dataset_factory can use the default.
if reader is None:
reader = tf.TFRecordReader
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='png'),
#'label/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
#'label/format': tf.FixedLenFeature((), tf.string, default_value='png'),
'image/height': tf.FixedLenFeature((), tf.int64),
'image/width': tf.FixedLenFeature((), tf.int64),
'label/num_instances': tf.FixedLenFeature((), tf.int64),
'label/gt_boxes': tf.FixedLenFeature((), tf.string),
#'label/gt_masks': tf.FixedLenFeature((), tf.string),
}
# def _masks_decoder(keys_to_tensors):
# masks = tf.decode_raw(keys_to_tensors['label/gt_masks'], tf.uint8)
# width = tf.cast(keys_to_tensors['image/width'], tf.int32)
# height = tf.cast(keys_to_tensors['image/height'], tf.int32)
# instances = tf.cast(keys_to_tensors['label/num_instances'], tf.int32)
# mask_shape = tf.stack([instances, height, width])
# return tf.reshape(masks, mask_shape)
def _gt_boxes_decoder(keys_to_tensors):
bboxes = tf.decode_raw(keys_to_tensors['label/gt_boxes'], tf.float32)
instances = tf.cast(keys_to_tensors['label/num_instances'], tf.int32)
bboxes_shape = tf.stack([instances, 5])
return tf.reshape(bboxes, bboxes_shape)
def _width_decoder(keys_to_tensors):
width = keys_to_tensors['image/width']
return tf.cast(width, tf.int32)
def _height_decoder(keys_to_tensors):
height = keys_to_tensors['image/height']
return tf.cast(height, tf.int32)
items_to_handlers = {
'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),
#'label': slim.tfexample_decoder.Image('label/encoded', 'label/format', channels=1),
#'gt_masks': slim.tfexample_decoder.ItemHandlerCallback(
# ['label/gt_masks', 'label/num_instances', 'image/width', 'image/height'], _masks_decoder),
'gt_boxes': slim.tfexample_decoder.ItemHandlerCallback(['label/gt_boxes', 'label/num_instances'], _gt_boxes_decoder),
'width': slim.tfexample_decoder.ItemHandlerCallback(['image/width'], _width_decoder),
'height': slim.tfexample_decoder.ItemHandlerCallback(['image/height'], _height_decoder),
}
decoder = slim.tfexample_decoder.TFExampleDecoder(
keys_to_features, items_to_handlers)
return slim.dataset.Dataset(
data_sources=file_pattern,
reader=reader,
decoder=decoder,
num_samples=SPLITS_TO_SIZES[split_name],
items_to_descriptions=_ITEMS_TO_DESCRIPTIONS,
num_classes=_NUM_CLASSES)
def read(tfrecords_filename):
if not isinstance(tfrecords_filename, list):
tfrecords_filename = [tfrecords_filename]
filename_queue = tf.train.string_input_producer(
tfrecords_filename, num_epochs=100)
options = tf.python_io.TFRecordOptions(TFRecordCompressionType.ZLIB)
reader = tf.TFRecordReader(options=options)
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
features={
'image/img_id': tf.FixedLenFeature([], tf.int64),
'image/encoded': tf.FixedLenFeature([], tf.string),
'image/height': tf.FixedLenFeature([], tf.int64),
'image/width': tf.FixedLenFeature([], tf.int64),
'label/num_instances': tf.FixedLenFeature([], tf.int64),
#'label/gt_masks': tf.FixedLenFeature([], tf.string),
'label/gt_boxes': tf.FixedLenFeature([], tf.string),
#'label/encoded': tf.FixedLenFeature([], tf.string),
})
# image = tf.image.decode_jpeg(features['image/encoded'], channels=3)
img_id = tf.cast(features['image/img_id'], tf.int32)
ih = tf.cast(features['image/height'], tf.int32)
iw = tf.cast(features['image/width'], tf.int32)
num_instances = tf.cast(features['label/num_instances'], tf.int32)
image = tf.decode_raw(features['image/encoded'], tf.uint8)
imsize = tf.size(image)
image = tf.cond(tf.equal(imsize, ih * iw), \
lambda: tf.image.grayscale_to_rgb(tf.reshape(image, (ih, iw, 1))), \
lambda: tf.reshape(image, (ih, iw, 3)))
gt_boxes = tf.decode_raw(features['label/gt_boxes'], tf.float32)
gt_boxes = tf.reshape(gt_boxes, [num_instances, 5])
#gt_masks = tf.decode_raw(features['label/gt_masks'], tf.uint8)
#gt_masks = tf.cast(gt_masks, tf.int32)
#gt_masks = tf.reshape(gt_masks, [num_instances, ih, iw])
return image, ih, iw, gt_boxes, num_instances, img_id
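# Minimal usage sketch (an addition, not part of the original module; assumes
# TFRecord files written with the matching ZLIB-compressed encoder):
#
#   image, ih, iw, gt_boxes, num_instances, img_id = read('city_train.tfrecord')
#   with tf.Session() as sess:
#       sess.run(tf.local_variables_initializer())
#       coord = tf.train.Coordinator()
#       threads = tf.train.start_queue_runners(coord=coord)
#       img, boxes = sess.run([image, gt_boxes])
#       coord.request_stop()
#       coord.join(threads)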
| apache-2.0 | -4,368,693,146,969,014,000 | 41.434783 | 121 | 0.678791 | false | 3.18088 | false | false | false |
umlfri/umlfri2 | umlfri2/ufl/types/structured/variablemetadata.py | 1 | 2846 | from ..base.type import UflType, UflAttributeDescription
class UflVariableMetadataType(UflType):
def __init__(self, metadata_type, underlying_type):
metadata = {}
for name, type in metadata_type.items():
metadata[name] = UflAttributeDescription(name, type)
next_type = underlying_type
next_prefix = '{0}.'.format(UflVariableWithMetadataType.VALUE_ATTRIBUTE)
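        # Walk nested UflVariableWithMetadataType wrappers: metadata defined on
        # the wrapped value is exposed here under the same name but with a
        # 'value.'-prefixed path; an outer definition of a name wins over an
        # inner one.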
while isinstance(next_type, UflVariableWithMetadataType):
for name, type in next_type.metadata_types:
if name not in metadata:
metadata[name] = UflAttributeDescription(next_prefix + name, type)
next_type = next_type.underlying_type
next_prefix = '{0}.{1}'.format(UflVariableWithMetadataType.VALUE_ATTRIBUTE, next_prefix)
self.ALLOWED_DIRECT_ATTRIBUTES = metadata
@property
def is_immutable(self):
return True
def __str__(self):
return "[VariableMetadata {0}]".format(", ".join(self.ALLOWED_DIRECT_ATTRIBUTES.keys()))
class UflVariableWithMetadataType(UflType):
VALUE_ATTRIBUTE = 'value'
def __init__(self, underlying_type, **metadata_types):
self.__underlying_type = underlying_type
self.__metadata_types = metadata_types
# use with caution, only for recursive metadata
def _add_metadata_type(self, name, type):
self.__metadata_types[name] = type
@property
def metadata_types(self):
yield from self.__metadata_types.items()
@property
def underlying_type(self):
return self.__underlying_type
@property
def metadata_type(self):
return UflVariableMetadataType(self.__metadata_types, self.__underlying_type)
def is_equatable_to(self, other):
if isinstance(other, UflVariableWithMetadataType):
return self.__underlying_type.is_equatable_to(other.__underlying_type)
else:
return self.__underlying_type.is_equatable_to(other)
def is_comparable_with(self, other):
if isinstance(other, UflVariableWithMetadataType):
return self.__underlying_type.is_comparable_with(other.__underlying_type)
else:
return self.__underlying_type.is_comparable_with(other)
def is_convertible_to(self, other):
return self.__underlying_type.is_convertible_to(other)
def resolve_unknown_generic(self, generics_cache):
raise Exception("Generic variable metadata is a nonsense, sorry.")
def resolve_generic(self, actual_type, generics_cache):
raise Exception("Generic variable metadata is a nonsense, sorry.")
def __str__(self):
return "[VariableWithMetadata {0} {1}]".format(repr(self.__underlying_type), ", ".join(self.__metadata_types.keys()))
| gpl-3.0 | -1,647,157,628,922,019,800 | 36.946667 | 125 | 0.64617 | false | 4.071531 | false | false | false |
anotherjesse/nova | nova/tests/glance/stubs.py | 1 | 1140 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import glance.client
def stubout_glance_client(stubs, cls):
"""Stubs out glance.client.Client"""
stubs.Set(glance.client, 'Client',
lambda *args, **kwargs: cls(*args, **kwargs))
class FakeGlance(object):
def __init__(self, host, port=None, use_ssl=False):
pass
def get_image(self, image):
meta = {
'size': 0,
}
image_file = StringIO.StringIO('')
return meta, image_file
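# Typical test usage (an illustration only; assumes a stubout/mox-style stub
# fixture such as the `self.stubs` object used in the nova test suite):
#
#   stubout_glance_client(self.stubs, FakeGlance)
#   client = glance.client.Client('localhost')
#   meta, image_file = client.get_image('some-image-id')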
| apache-2.0 | 5,501,775,592,373,386,000 | 29.810811 | 78 | 0.662281 | false | 3.838384 | false | false | false |
jonaskje/cchef-math | templates.py | 1 | 4062 | # vim: set tabstop=8 shiftwidth=8 expandtab:
types = dict (
double = dict(
name = 'double',
zero = '0.0',
one = '1.0',
sqrt = 'sqrt',
typeSuffix = 'd',
),
float = dict(
name = 'float',
zero = '0.0f',
one = '1.0f',
sqrt = 'sqrtf',
typeSuffix = '',
),
int32 = dict(
name = 'int32_t',
zero = '0',
one = '1',
typeSuffix = 'i',
),
)
variants = [
dict(
dataType = 'Vec2',
elemType = types['float'],
funcPrefix = 'v2',
components = ['x', 'y'],
), dict(
dataType = 'Vec3',
elemType = types['float'],
funcPrefix = 'v3',
components = ['x', 'y', 'z'],
), dict(
dataType = 'Vec4',
elemType = types['float'],
funcPrefix = 'v4',
components = ['x', 'y', 'z', 'w'],
), dict(
dataType = 'Vec2',
elemType = types['double'],
funcPrefix = 'v2',
components = ['x', 'y'],
), dict(
dataType = 'Vec3',
elemType = types['double'],
funcPrefix = 'v3',
components = ['x', 'y', 'z'],
), dict(
dataType = 'Vec4',
elemType = types['double'],
funcPrefix = 'v4',
components = ['x', 'y', 'z', 'w'],
), dict(
dataType = 'Vec2',
elemType = types['int32'],
funcPrefix = 'v2',
components = ['x', 'y'],
), dict(
dataType = 'Vec3',
elemType = types['int32'],
funcPrefix = 'v3',
components = ['x', 'y', 'z'],
), dict(
dataType = 'Vec4',
elemType = types['int32'],
funcPrefix = 'v4',
components = ['x', 'y', 'z', 'w'],
),
]
functions = [
###
dict(
match = '.*',
func = [
('New',
'$T $P$N(%%", ".join([("$E " + e) for e in variant["components"]])%%)',
""" {
const $T r = { %%", ".join([e for e in variant["components"]])%% };
return r;
}
"""),
('NewZeroLength',
'$T $P$N(void)',
""" {
const $T r = { $(zero) };
return r;
}
"""),
('Sum',
'$T $P$N($T a, $T b)',
""" {
const $T r = { %%join('a', 'b', ' + ', ', ')%% };
return r;
}
"""),
('Diff',
'$T $P$N($T a, $T b)',
""" {
const $T r = { %%join('a', 'b', ' - ', ', ')%% };
return r;
}
"""),
('Product',
'$T $P$N($T a, $E scale)',
""" {
const $T r = { %%join('a', '#scale', ' * ', ', ')%% };
return r;
}
"""),
('DotProduct',
'$E $P$N($T a, $T b)',
""" {
return %%join('a', 'b', ' * ', ' + ')%%;
}
"""),
('Norm',
'$E $P$N($T a)',
""" {
return %%join('a', 'a', ' * ', ' + ')%%;
}
"""),
]),
### Functions that have no meaning for integers
dict(
match = 'Vec.[^i]?$',
func = [
('Length',
'$E $P$N($T a)',
""" {
return $(sqrt)(%%join('a', 'a', ' * ', ' + ')%%);
}
"""),
('UnitVector',
'$T $P$N($T a)',
""" {
$E length = $PLength(a);
if (length > $(zero)) {
const $T r = { %%join('a', '#length', '/', ' , ')%% };
return r;
} else {
const $T r = { $(zero) };
return r;
}
}
"""),
]),
###
dict(
match = 'Vec2',
func = [
('Area',
'$E $P$N($T a)',
""" {
return a.$0 * a.$1;
}
"""),
]),
###
dict(
match = 'Vec4',
func = [
('NewFromVec3',
'$T $P$N$(typeSuffix)(Vec3$(typeSuffix) xyz, $E w)',
""" {
const $T r = { xyz.$0, xyz.$1, xyz.$2, w };
return r;
}
"""),
('ToVec3',
'Vec3$(typeSuffix) $P$N$(typeSuffix)($T a)',
""" {
const Vec3$(typeSuffix) r = { a.$0, a.$1, a.$2 };
return r;
}
"""),
]),
###
]
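# Illustrative expansion (an assumption about the code generator, which lives
# outside this file): with dataType='Vec2', elemType=types['float'] and
# funcPrefix='v2', the ('Sum', ...) template above would render roughly as
#
#   Vec2 v2Sum(Vec2 a, Vec2 b)
#   {
#       const Vec2 r = { a.x + b.x, a.y + b.y };
#       return r;
#   }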
headerIntro = """
#pragma once
/* AUTO-GENERATED DO NOT EDIT */
#include <stdint.h>
"""
sourceIntro = """
/* AUTO-GENERATED DO NOT EDIT */
#include "$(headerFilename)"
#include <math.h>
#include <float.h>
"""
| mit | 1,239,360,678,374,717,200 | 17.547945 | 75 | 0.371738 | false | 2.97147 | false | false | false |
jsouza/pamtl | src/mtl/partl_regression.py | 1 | 6972 | from itertools import izip
import numpy as np
import scipy as sp
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils import check_array, extmath
__author__ = 'desouza'
class PARTLRegressor(BaseEstimator, RegressorMixin):
"""
    Online Task Relationship Learning algorithm proposed by Saha et al., in
    which the task relationship matrix is dynamically learned. Here the loss
    is the hinge loss instead of the perceptron loss.
"""
def __init__(self, task_num, feats_num, rounds_num=100, C=1, epsilon=0.01,
loss="pai", divergence="cov", eta=0.01, n_iter=1,
centered=True):
self.feats_num = feats_num
self.task_num = task_num
if self.task_num < 1:
raise ValueError("number of tasks must be greater than 1.")
# initialize interaction matrix with independent learners
self.A = 1.0 / task_num * np.identity(task_num)
# hyper-parameters
self.C = C
self.epsilon = epsilon
self.loss = loss
self.divergence = divergence
self.n_iter = n_iter
self.eta = eta
# number of rounds of priming A
self.rounds_num = rounds_num
self.centered = centered
# initialize w's with d x K positions (d = feats, K = tasks)
self.coef_ = np.zeros(self.feats_num * self.task_num)
# averaged model
self.avg_coef_ = np.copy(self.coef_)
# number of instances seen
self.t = 0
# number of instances discarded (zero-arrays)
self.discarded = 0
def _pa(self, loss_t, x_t):
denom = extmath.norm(x_t) ** 2.0
# special case when L_2 norm of x_t is zero (followed libol
# implementation)
if denom == 0:
return 1
d = loss_t / denom
return d
def _pai(self, loss_t, x_t):
pa = self._pa(loss_t, x_t)
if self.C < pa:
return self.C
return pa
def _paii(self, loss_t, x_t):
        # PA-II update step: loss / (||x||^2 + 1/(2C))
        return loss_t / (extmath.norm(x_t) ** 2.0 + 1.0 / (2.0 * self.C))
def _get_task_id(self, X_inst, feats_num):
        # the index of the first non-zero feature tells us which task block
        # this stacked instance belongs to
        nonzero_indices = np.nonzero(X_inst != 0)
        first_index = nonzero_indices[0][0]
        task_id = first_index / feats_num
        return task_id
def _sym(self, X):
temp = (X + X.T) / 2.0
return temp
def _batch_opt(self, W):
num = np.dot(W.T, W) ** (1.0 / 2.0)
denom = np.trace(num)
bo = num / denom
return bo
def _log_det_div(self, W):
prev_a_inv = np.linalg.inv(self.A)
# prev_a_inv = np.linalg.pinv(self.A)
WT = W.T
dot_w = np.dot(WT, W)
symdot = self._sym(dot_w)
log_det = prev_a_inv + self.eta * symdot
# return log_det
log_det_inv = np.linalg.inv(log_det)
# log_det_inv = np.linalg.pinv(log_det)
return log_det_inv
def _von_neumann_div(self, W):
dot_w = np.dot(W.T, W)
# log_a = np.log(self.A) - self.eta * self._sym(dot_w)
log_a = sp.linalg.logm(self.A) - self.eta * self._sym(dot_w)
# exp_log_a = np.exp(log_a)
exp_log_a = sp.linalg.expm(log_a)
return exp_log_a
def fit(self, X, y):
        X = check_array(X)
        y = check_array(y, ensure_2d=False)
        for x_i, y_i in izip(X, y):
            # partial_fit expects one stacked 1-d feature vector and its
            # scalar target
            self.partial_fit(x_i, y_i)
return self
def partial_fit(self, X_t, y_t):
# if all features are zero, discard example
if np.sum(X_t) == 0:
self.discarded += 1
return self
# updates the number of instances seen
self.t += 1
reg_func = self._pai
if self.loss == "pa":
reg_func = self._pa
elif self.loss == "pai":
reg_func = self._pai
elif self.loss == "paii":
reg_func = self._paii
for _ in xrange(self.n_iter):
# gets prediction based on current model
y_hat_t = np.dot(self.coef_, X_t.T)
# calculates difference between prediction and actual value
discrepancy_t = np.abs(y_hat_t - y_t)
#print discrepancy_t.shape, discrepancy_t
# wx_dot = np.dot(self.coef_, X_t)
# y_hat_t = np.sign(wx_dot)
# loss_t = max([0, (1 - y_t * wx_dot)])
# computes epsilon-hinge loss
loss_t = 0
if discrepancy_t > self.epsilon:
loss_t = discrepancy_t - self.epsilon
tau_t = reg_func(loss_t, X_t)
task_id = self._get_task_id(X_t, self.feats_num)
for task in xrange(self.task_num):
# for indexing task weights that change in the for loop
begin = task * self.feats_num
end = begin + self.feats_num
# for indexing the task of X_t
tbegin = task_id * self.feats_num
tend = tbegin + self.feats_num
# computes new coefs
new_coef = np.sign(y_t - y_hat_t) * self.A[
task, task_id] * tau_t * X_t[tbegin:tend]
# updates coefs
self.coef_[begin:end] += new_coef
self.avg_coef_ += self.coef_
# updates A
if self.t >= self.rounds_num:
# first, reshape coefs (w in the paper) to W
# which is the d x K matrix where d are the
# features and K the different tasks
w = np.copy(self.coef_)
W = w.reshape((self.task_num, self.feats_num)).T
# update interaction matrix
if self.divergence == "cov":
covA = np.cov(W, rowvar=0)
if self.centered:
self.A = covA
else:
self.A = np.linalg.inv(covA)
elif self.divergence == "corrcoef":
corrcoefW = np.corrcoef(W, rowvar=0)
if self.centered:
self.A = corrcoefW
else:
self.A = np.linalg.inv(corrcoefW)
elif self.divergence == "vn":
self.A = self._von_neumann_div(W)
elif self.divergence == "ld":
self.A = self._log_det_div(W)
elif self.divergence == "bo":
self.A = self._batch_opt(W)
else:
raise ValueError("divergence mode not valid")
# np.fill_diagonal(self.A, 1)
return self
def predict(self, X, averaged=False):
X = check_array(X)
# if self.fit_intercept:
# X = np.column_stack((X, np.ones(X.shape[0])))
# print self.coef_.shape
# print X.shape
if not averaged:
y_preds = np.dot(self.coef_, X.T)
else:
y_preds = np.dot(self.avg_coef_, X.T)
return y_preds
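# Minimal usage sketch (an addition, not part of the original module).
# Feature vectors use the stacked multi-task layout: a row of length
# feats_num * task_num that is zero outside the block of its own task.
if __name__ == '__main__':
    feats_num, task_num = 4, 2
    reg = PARTLRegressor(task_num, feats_num)
    x_task0 = np.array([1., 2., 3., 4., 0., 0., 0., 0.])  # sample for task 0
    reg.partial_fit(x_task0, 1.5)
    print(reg.predict(x_task0.reshape(1, -1)))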
| mit | 4,134,933,629,266,708,000 | 28.294118 | 78 | 0.505307 | false | 3.480779 | false | false | false |
liumengjun/django-static-precompiler | static_precompiler/management/commands/compilestatic.py | 1 | 3299 | import os
import sys
import django
import django.contrib.staticfiles.finders
import django.core.files.storage
import django.core.management.base
from static_precompiler import exceptions, settings, utils
def get_scanned_dirs():
dirs = set()
if settings.STATIC_ROOT:
dirs.add(settings.STATIC_ROOT)
for finder in django.contrib.staticfiles.finders.get_finders():
if hasattr(finder, "storages"):
for storage in finder.storages.values():
if isinstance(storage, django.core.files.storage.FileSystemStorage):
dirs.add(storage.location)
return sorted(dirs)
class Command(django.core.management.base.BaseCommand):
help = "Compile static files."
requires_model_validation = False
def add_arguments(self, parser):
parser.add_argument(
"--watch",
action="store_true",
dest="watch",
default=False,
help="Watch for changes and recompile if necessary."
)
parser.add_argument(
"--no-initial-scan",
action="store_false",
dest="initial_scan",
default=True,
help="Skip the initial scan of watched directories in --watch mode."
)
def handle(self, **options):
if not options["watch"] and not options["initial_scan"]:
sys.exit("--no-initial-scan option should be used with --watch.")
scanned_dirs = get_scanned_dirs()
verbosity = int(options["verbosity"])
compilers = utils.get_compilers().values()
if not options["watch"] or options["initial_scan"]:
# Scan the watched directories and compile everything
for scanned_dir in scanned_dirs:
for dirname, dirnames, filenames in os.walk(scanned_dir):
for filename in filenames:
path = os.path.join(dirname, filename)[len(scanned_dir):]
if path.startswith("/"):
path = path[1:]
for compiler in compilers:
if compiler.is_supported(path):
try:
compiler.handle_changed_file(path, verbosity=verbosity)
except (exceptions.StaticCompilationError, ValueError) as e:
print(e)
break
if options["watch"]:
from static_precompiler.watch import watch_dirs
watch_dirs(scanned_dirs, verbosity)
if django.VERSION < (1, 8):
import optparse
Command.option_list = django.core.management.base.NoArgsCommand.option_list + (
optparse.make_option("--watch",
action="store_true",
dest="watch",
default=False,
help="Watch for changes and recompile if necessary."),
optparse.make_option("--no-initial-scan",
action="store_false",
dest="initial_scan",
default=True,
help="Skip the initial scan of watched directories in --watch mode.")
)
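# Typical invocations (assuming the app providing this command is listed in
# INSTALLED_APPS):
#
#   python manage.py compilestatic
#   python manage.py compilestatic --watch
#   python manage.py compilestatic --watch --no-initial-scan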
| mit | -8,868,561,066,244,211,000 | 35.655556 | 98 | 0.543498 | false | 4.960902 | false | false | false |
agoose77/hivesystem | tutorial/layers/layer5/layershive.py | 2 | 3743 | # translation of layers.hivemap to hive system Python code
#find out where the hivemaps are
import os
import hivemaps
hivemapsdir = os.path.split(hivemaps.__file__)[0]
if not len(hivemapsdir): hivemapsdir = "."
action1hivemapfile = os.path.join(hivemapsdir, "action1.hivemap")
action2hivemapfile = os.path.join(hivemapsdir, "action2.hivemap")
#load the hivemaps
import spyder, Spyder
action1hivemap = Spyder.Hivemap.fromfile(action1hivemapfile)
action2hivemap = Spyder.Hivemap.fromfile(action2hivemapfile)
from bee.spyderhive.hivemaphive import hivemapframe
class action1hivemaphive(hivemapframe):
hivemap = action1hivemap
class action2hivemaphive(hivemapframe):
hivemap = action2hivemap
"""
We could also put both hivemaps into a single hivemaphive:
class actionshivemaphive(hivemapframe):
act1 = action1hivemap
act2 = action2hivemap
In that case, we should replace in the hive below:
action1 = action1hivemaphive()
action2 = action2hivemaphive()
=> actions = actionshivemaphive
and:
(action1,"hivemap","soundplay")
=> (actions,"act1","soundplay")
(action2,"hivemap","actionplay")
=> (actions,"act2","actionplay")
"""
###
#load the action3 hive
from workers.action3 import action3hive
#define the "layers" hive
import bee
from dragonfly.std import *
import dragonfly.io
import dragonfly.sys
from bee import connect
class layershive(bee.frame):
#START message
variable_str_1 = variable("str")("START")
#or:
# variable_str_1 = variable_str("START")
transistor_5 = transistor("str")()
connect(variable_str_1, transistor_5)
startsensor_1 = dragonfly.sys.startsensor()
connect(startsensor_1, transistor_5)
#or:
# connect(startsensor_1.outp, transistor_5.trig)
display_1 = dragonfly.io.display("str")()
connect(transistor_5, display_1)
#or:
# connect(transistor_5.outp, display_1.inp)
#action 1
action1 = action1hivemaphive()
vwalk = variable("id")("walk")
keyW = dragonfly.io.keyboardsensor_trigger("W")
transistor_1 = transistor("id")()
connect(vwalk, transistor_1)
connect(keyW, transistor_1)
connect(transistor_1, (action1, "hivemap", "animplay"))
connect(transistor_1, (action1, "hivemap", "soundplay"))
vjump = variable("id")("jump")
keyTAB = dragonfly.io.keyboardsensor_trigger("TAB")
transistor_2 = transistor("id")()
connect(vjump, transistor_2)
connect(keyTAB, transistor_2)
connect(transistor_2, (action1, "hivemap", "animplay"))
connect(transistor_2, (action1, "hivemap", "soundplay"))
#action 2
action2 = action2hivemaphive()
vrun = variable("id")("run")
keyR = dragonfly.io.keyboardsensor_trigger("R")
transistor_4 = transistor("id")()
connect(vrun, transistor_4)
connect(keyR, transistor_4)
connect(transistor_4, (action2, "hivemap", "actionplay"))
vshoot = variable("id")("shoot")
keySPACE = dragonfly.io.keyboardsensor_trigger("SPACE")
transistor_3 = transistor("id")()
connect(vshoot, transistor_3)
connect(keySPACE, transistor_3)
connect(transistor_3, (action2, "hivemap", "actionplay"))
#action 3
action3 = action3hive()
vswim = variable("id")("swim")
keyS = dragonfly.io.keyboardsensor_trigger("S")
transistor_6 = transistor("id")()
connect(vswim, transistor_6)
connect(keyS, transistor_6)
connect(transistor_6, action3.animplay)
connect(transistor_6, action3.soundplay)
vcrouch = variable("id")("crouch")
keyC = dragonfly.io.keyboardsensor_trigger("C")
transistor_7 = transistor("id")()
connect(vcrouch, transistor_7)
connect(keyC, transistor_7)
connect(transistor_7, action3.animplay)
connect(transistor_7, action3.soundplay)
| bsd-2-clause | 970,173,458,782,025,300 | 27.572519 | 65 | 0.701309 | false | 2.965927 | false | false | false |
bongtrop/peach | peach/fuzzy/base.py | 6 | 4571 | ################################################################################
# Peach - Computational Intelligence for Python
# Jose Alexandre Nalon
#
# This file: fuzzy/fuzzy.py
# Fuzzy logic basic definitions
################################################################################
# Doc string, reStructuredText formatted:
__doc__ = """
This package implements basic definitions for fuzzy logic
"""
################################################################################
import numpy
import types
import norms
################################################################################
# Classes
################################################################################
class FuzzySet(numpy.ndarray):
'''
Array containing fuzzy values for a set.
This class defines the behavior of a fuzzy set. It is an array of values in
the range from 0 to 1, and the basic operations of the logic -- and (using
the ``&`` operator); or (using the ``|`` operator); not (using ``~``
operator) -- can be defined according to a set of norms. The norms can be
redefined using the appropriated methods.
To create a FuzzySet, instantiate this class with a sequence as argument,
for example::
fuzzy_set = FuzzySet([ 0., 0.25, 0.5, 0.75, 1.0 ])
'''
__AND__ = norms.ZadehAnd
'Class variable to hold the *and* method'
__OR__ = norms.ZadehOr
'Class variable to hold the *or* method'
__NOT__ = norms.ZadehNot
'Class variable to hold the *not* method'
def __new__(cls, data):
'''
Allocates space for the array.
A fuzzy set is derived from the basic NumPy array, so the appropriate
functions and methods are called to allocate the space. In theory, the
values for a fuzzy set should be in the range ``0.0 <= x <= 1.0``, but
to increase efficiency, no verification is made.
:Returns:
A new array object with the fuzzy set definitions.
'''
data = numpy.array(data, dtype=float)
shape = data.shape
data = numpy.ndarray.__new__(cls, shape=shape, buffer=data,
dtype=float, order=False)
return data.copy()
def __init__(self, data=[]):
'''
Initializes the object.
        Operations default to the Zadeh norms (and = ``min``, or = ``max``, not = ``1 - x``)
'''
pass
def __and__(self, a):
'''
Fuzzy and (``&``) operation.
'''
return FuzzySet(FuzzySet.__AND__(self, a))
def __or__(self, a):
'''
Fuzzy or (``|``) operation.
'''
return FuzzySet(FuzzySet.__OR__(self, a))
def __invert__(self):
'''
Fuzzy not (``~``) operation.
'''
return FuzzySet(FuzzySet.__NOT__(self))
@classmethod
def set_norm(cls, f):
'''
Selects a t-norm (and operation)
Use this method to change the behaviour of the and operation.
:Parameters:
f
A function of two parameters which must return the ``and`` of the
values.
'''
if isinstance(f, numpy.vectorize):
cls.__AND__ = f
elif isinstance(f, types.FunctionType):
cls.__AND__ = numpy.vectorize(f)
else:
raise ValueError, 'invalid function'
@classmethod
def set_conorm(cls, f):
'''
Selects a t-conorm (or operation)
Use this method to change the behaviour of the or operation.
:Parameters:
f
A function of two parameters which must return the ``or`` of the
values.
'''
if isinstance(f, numpy.vectorize):
cls.__OR__ = f
elif isinstance(f, types.FunctionType):
cls.__OR__ = numpy.vectorize(f)
else:
raise ValueError, 'invalid function'
@classmethod
def set_negation(cls, f):
'''
Selects a negation (not operation)
Use this method to change the behaviour of the not operation.
:Parameters:
f
A function of one parameter which must return the ``not`` of the
value.
'''
if isinstance(f, numpy.vectorize):
cls.__NOT__ = f
elif isinstance(f, types.FunctionType):
cls.__NOT__ = numpy.vectorize(f)
else:
raise ValueError, 'invalid function'
################################################################################
# Test
if __name__ == "__main__":
pass | lgpl-2.1 | -3,479,241,759,327,574,000 | 28.688312 | 80 | 0.504266 | false | 4.571 | false | false | false |
ShaguptaS/faker | faker/providers/internet.py | 1 | 4044 | from __future__ import unicode_literals
from . import BaseProvider
import random
import re
from faker.providers.lorem import Provider as Lorem
class Provider(BaseProvider):
safe_email_tlds = ('org', 'com', 'net')
free_email_domains = ('gmail.com', 'yahoo.com', 'hotmail.com')
tlds = ('com', 'com', 'com', 'com', 'com', 'com', 'biz', 'info', 'net', 'org')
uri_pages = (
'index', 'home', 'search', 'main', 'post', 'homepage', 'category', 'register', 'login', 'faq', 'about', 'terms',
'privacy', 'author')
uri_paths = (
'app', 'main', 'wp-content', 'search', 'category', 'tag', 'categories', 'tags', 'blog', 'posts', 'list', 'explore')
uri_extensions = ('.html', '.html', '.html', '.htm', '.htm', '.php', '.php', '.jsp', '.asp')
user_name_formats = (
'{{last_name}}.{{first_name}}',
'{{first_name}}.{{last_name}}',
'{{first_name}}##',
'?{{last_name}}',
)
email_formats = (
'{{user_name}}@{{domain_name}}',
'{{user_name}}@{{free_email_domain}}',
)
url_formats = (
'http://www.{{domain_name}}/',
'http://{{domain_name}}/',
)
uri_formats = (
'{{url}}',
'{{url}}{{uri_page}}/',
'{{url}}{{uri_page}}{{uri_extension}}',
'{{url}}{{uri_path}}/{{uri_page}}/',
'{{url}}{{uri_path}}/{{uri_page}}{{uri_extension}}',
)
def email(self):
pattern = self.random_element(self.email_formats)
return "".join(self.generator.parse(pattern).split(" "))
def safe_email(self):
return self.user_name() + '@example.' + self.random_element(self.safe_email_tlds)
def free_email(self):
return self.user_name() + '@' + self.free_email_domain()
def company_email(self):
return self.user_name() + '@' + self.domain_name()
@classmethod
def free_email_domain(cls):
return cls.random_element(cls.free_email_domains)
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self.bothify(self.generator.parse(pattern)).lower()
def domain_name(self):
return self.domain_word() + '.' + self.tld()
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = company_elements.pop(0)
return re.sub(r'\W', '', company).lower()
def tld(self):
return self.random_element(self.tlds)
def url(self):
pattern = self.random_element(self.url_formats)
return self.generator.parse(pattern)
def ipv4(self):
"""
Convert 32-bit integer to dotted IPv4 address.
"""
return ".".join(map(lambda n: str(random.randint(-2147483648, 2147483647) >> n & 0xFF), [24, 16, 8, 0]))
def ipv6(self):
res = []
for i in range(0, 8):
res.append(hex(random.randint(0, 65535))[2:].zfill(4))
return ":".join(res)
@classmethod
def uri_page(cls):
return cls.random_element(cls.uri_pages)
@classmethod
def uri_path(cls, deep=None):
deep = deep if deep else random.randint(1, 3)
return "/".join([cls.random_element(cls.uri_paths) for _ in range(0, deep)])
@classmethod
def uri_extension(cls):
return cls.random_element(cls.uri_extensions)
def uri(self):
pattern = self.random_element(self.uri_formats)
return self.generator.parse(pattern)
@classmethod
def slug(cls, value=None):
"""
Django algorithm
"""
import unicodedata
#value = unicode(value or Lorem.text(20))
#value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
#value = unicode(re.sub(r'[^\w\s-]', '', value).strip().lower())
#return re.sub('[-\s]+', '-', value)
value = unicodedata.normalize('NFKD', value or Lorem.text(20)).encode('ascii', 'ignore').decode('ascii')
value = re.sub('[^\w\s-]', '', value).strip().lower()
return re.sub('[-\s]+', '-', value)
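# Minimal usage sketch (an addition; assumes the package's public entry point):
#
#   from faker import Faker
#   fake = Faker()
#   fake.email()   # e.g. 'doyle.shaun@hotmail.com'
#   fake.ipv4()    # e.g. '171.174.170.81'
#   fake.slug()    # e.g. 'aspernatur-inventore-magnam'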
| mit | 4,615,283,915,182,011,000 | 31.878049 | 119 | 0.560089 | false | 3.45641 | false | false | false |
tanbro/exosip2ctypes | src/exosip2ctypes/utils.py | 1 | 2977 | # -*- coding: utf-8 -*-
"""
Some helper functions
"""
from __future__ import absolute_import, unicode_literals
import logging
__all__ = ['to_bytes', 'to_str', 'to_unicode', 'LoggerMixin']
if bytes != str: # Python 3
#: Define text string data type, same as that in Python 2.x.
unicode = str
def to_bytes(s, encoding='utf-8'):
"""Convert to `bytes` string.
:param s: String to convert.
:param str encoding: Encoding codec.
:return: `bytes` string, it's `bytes` or `str` in Python 2.x, `bytes` in Python 3.x.
:rtype: bytes
* In Python 2, convert `s` to `bytes` if it's `unicode`.
* In Python 2, return original `s` if it's not `unicode`.
* In Python 2, it equals to :func:`to_str`.
* In Python 3, convert `s` to `bytes` if it's `unicode` or `str`.
* In Python 3, return original `s` if it's neither `unicode` nor `str`.
"""
if isinstance(s, unicode):
return s.encode(encoding)
else:
return s
def to_str(s, encoding='utf-8'):
"""Convert to `str` string.
:param s: String to convert.
:param str encoding: Decoding codec.
:return: `str` string, it's `bytes` in Python 2.x, `unicode` or `str` in Python 3.x.
:rtype: str
* In Python 2, convert `s` to `str` if it's `unicode`.
* In Python 2, return original `s` if it's not `unicode`.
* In Python 2, it equals to :func:`to_bytes`.
* In Python 3, convert `s` to `str` if it's `bytes`.
* In Python 3, return original `s` if it's not `bytes`.
* In Python 3, it equals to :func:`to_unicode`.
"""
if bytes == str: # Python 2
return to_bytes(s, encoding)
else: # Python 3
return to_unicode(s, encoding)
def to_unicode(s, encoding='utf-8'):
"""Convert to `unicode` string.
:param s: String to convert.
:param str encoding: Encoding codec.
:return: `unicode` string, it's `unicode` in Python 2.x, `str` or `unicode` in Python 3.x.
:rtype: unicode
* In Python 2, convert `s` to `unicode` if it's `str` or `bytes`.
* In Python 2, return original `s` if it's neither `str` or `bytes`.
* In Python 3, convert `s` to `str` or `unicode` if it's `bytes`.
* In Python 3, return original `s` if it's not `bytes`.
* In Python 3, it equals to :func:`to_str`.
"""
if isinstance(s, bytes):
return s.decode(encoding)
else:
return s
class LoggerMixin(object):
"""Mixin Class provide a :attr:`logger` property
"""
@property
def logger(self):
"""`logger` instance.
:rtype: logging.Logger
logger name format is `ModuleName.ClassName`
"""
try:
name = '{0.__module__:s}.{0.__qualname__:s}'.format(self.__class__)
except AttributeError:
name = '{0.__module__:s}.{0.__name__:s}'.format(self.__class__)
return logging.getLogger(name)
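if __name__ == '__main__':
    # Quick self-check of the conversion helpers (an addition, not part of
    # the original module): round-trip a text string through bytes and back.
    raw = to_bytes('héllo wörld')
    assert isinstance(raw, bytes)
    assert to_unicode(raw) == 'héllo wörld'
    assert to_str(to_unicode(raw)) == to_str(raw)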
| gpl-3.0 | -2,007,577,341,453,639,200 | 29.336842 | 94 | 0.57306 | false | 3.494131 | false | false | false |
Erotemic/hotspotter | _graveyard/_broken/MainWindow.py | 2 | 17501 | from PyQt4 import QtCore, QtGui
from PyQt4.Qt import QObject, pyqtSignal, QFileDialog
from MainSkel import Ui_mainSkel
import multiprocessing
from PyQt4.Qt import QMainWindow, QTableWidgetItem, QMessageBox, \
QAbstractItemView, QWidget, Qt, pyqtSlot, pyqtSignal, \
QStandardItem, QStandardItemModel, QString, QObject
from _tpl.other.matplotlibwidget import MatplotlibWidget
# http://stackoverflow.com/questions/2312210/window-icon-of-exe-in-pyqt4
#-------------------------------------------
def gui_log(fn):
'log what happens in the GUI for debugging'
def gui_log_wrapper(hsgui, *args, **kwargs):
try:
function_name = fn.func_name
into_str = 'In hsgui.'+function_name
outo_str = 'Out hsgui.'+function_name+'\n'
hsgui.logdbgSignal.emit(into_str)
ret = fn(hsgui, *args, **kwargs)
hsgui.logdbgSignal.emit(outo_str)
return ret
except Exception as ex:
import traceback
logmsg('\n\n *!!* HotSpotter GUI Raised Exception: '+str(ex))
logmsg('\n\n *!!* HotSpotter GUI Exception Traceback: \n\n'+traceback.format_exc())
return gui_log_wrapper
class EditPrefWidget(QWidget):
'The Settings Pane; Subclass of Main Windows.'
def __init__(epw, fac):
super( EditPrefWidget, epw ).__init__()
epw.pref_skel = Ui_editPrefSkel()
epw.pref_skel.setupUi(epw)
epw.pref_model = None
epw.pref_skel.redrawBUT.clicked.connect(fac.redraw)
epw.pref_skel.defaultPrefsBUT.clicked.connect(fac.default_prefs)
epw.pref_skel.unloadFeaturesAndModelsBUT.clicked.connect(fac.unload_features_and_models)
@pyqtSlot(Pref, name='populatePrefTreeSlot')
def populatePrefTreeSlot(epw, pref_struct):
'Populates the Preference Tree Model'
logdbg('Bulding Preference Model of: '+repr(pref_struct))
epw.pref_model = pref_struct.createQPreferenceModel()
logdbg('Built: '+repr(epw.pref_model))
epw.pref_skel.prefTreeView.setModel(epw.pref_model)
epw.pref_skel.prefTreeView.header().resizeSection(0,250)
class HotSpotterMainWindow(QtGui.QMainWindow):
    populateChipTblSignal = pyqtSignal(list, list, list, list)
    def __init__(self, hs=None):
        super(HotSpotterMainWindow, self).__init__()
        self.hs = None
        self.ui = Ui_mainSkel()
        self.ui.setupUi(self)
        self.show()
        if hs is not None:
            self.connect_api(hs)
    def connect_api(hsgui, hs):
        print('[win] connecting api')
        hsgui.hs = hs
hsgui.epw = EditPrefWidget(fac)
hsgui.plotWidget = MatplotlibWidget(hsgui.main_skel.centralwidget)
hsgui.plotWidget.setObjectName(_fromUtf8('plotWidget'))
hsgui.main_skel.root_hlayout.addWidget(hsgui.plotWidget)
hsgui.prev_tbl_item = None
hsgui.prev_cid = None
hsgui.prev_gid = None
hsgui.non_modal_qt_handles = []
def connectSignals(hsgui, fac):
'Connects GUI signals to Facade Actions'
logdbg('Connecting GUI >> to >> Facade')
# Base Signals
hsgui.selectCidSignal.connect(fac.selc)
hsgui.selectGidSignal.connect(fac.selg)
hsgui.renameChipIdSignal.connect(fac.rename_cid)
hsgui.changeChipPropSignal.connect(fac.change_chip_prop)
hsgui.logdbgSignal.connect(fac.logdbgSlot)
# SKEL SIGNALS
main_skel = hsgui.main_skel
# Widget
hsgui.main_skel.fignumSPIN.valueChanged.connect(
fac.set_fignum)
# File
main_skel.actionOpen_Database.triggered.connect(
fac.open_db)
main_skel.actionSave_Database.triggered.connect(
fac.save_db)
main_skel.actionImport_Images.triggered.connect(
fac.import_images)
main_skel.actionQuit.triggered.connect(
hsgui.close)
# Actions
main_skel.actionQuery.triggered.connect(
fac.query)
main_skel.actionAdd_ROI.triggered.connect(
fac.add_chip)
main_skel.actionReselect_Orientation.triggered.connect(
fac.reselect_orientation)
main_skel.actionReselect_ROI.triggered.connect(
fac.reselect_roi)
main_skel.actionRemove_Chip.triggered.connect(
fac.remove_cid)
main_skel.actionNext.triggered.connect(
fac.select_next)
# Options
main_skel.actionTogEll.triggered.connect(
fac.toggle_ellipse)
main_skel.actionTogPts.triggered.connect(
fac.toggle_points)
main_skel.actionTogPlt.triggered.connect(
hsgui.setPlotWidgetVisibleSlot)
main_skel.actionPreferences.triggered.connect(
hsgui.epw.show )
# Help
main_skel.actionView_Documentation.triggered.connect(
fac.view_documentation)
main_skel.actionHelpCMD.triggered.connect(
lambda:hsgui.msgbox('Command Line Help', cmd_help))
main_skel.actionHelpWorkflow.triggered.connect(
lambda:hsgui.msgbox('Workflow HOWTO', workflow_help))
main_skel.actionHelpTroubles.triggered.connect(
lambda:hsgui.msgbox('Troubleshooting Help', troubles_help))
main_skel.actionWriteLogs.triggered.connect(
fac.write_logs)
# Convinience
main_skel.actionOpen_Source_Directory.triggered.connect(
fac.vd)
main_skel.actionOpen_Data_Directory.triggered.connect(
fac.vdd)
main_skel.actionOpen_Internal_Directory.triggered.connect(
fac.vdi)
main_skel.actionConvertImage2Chip.triggered.connect(
fac.convert_all_images_to_chips)
main_skel.actionBatch_Change_Name.triggered.connect(
fac._quick_and_dirty_batch_rename)
main_skel.actionAdd_Metadata_Property.triggered.connect(
fac.add_new_prop)
main_skel.actionAssign_Matches_Above_Threshold.triggered.connect(
fac.match_all_above_thresh)
main_skel.actionIncrease_ROI_Size.triggered.connect(
fac.expand_rois)
# Experiments
main_skel.actionMatching_Experiment.triggered.connect(
fac.run_matching_experiment)
main_skel.actionName_Consistency_Experiment.triggered.connect(
fac.run_name_consistency_experiment)
#
# Gui Components
# Tables Widgets
main_skel.chip_TBL.itemClicked.connect(
hsgui.chipTableClickedSlot)
main_skel.chip_TBL.itemChanged.connect(
hsgui.chipTableChangedSlot)
main_skel.image_TBL.itemClicked.connect(
hsgui.imageTableClickedSlot)
main_skel.res_TBL.itemChanged.connect(
hsgui.resultTableChangedSlot)
# Tab Widget
# This signal slot setup is very bad. Needs rewrite
main_skel.tablesTabWidget.currentChanged.connect(
fac.change_view)
main_skel.chip_TBL.sortByColumn(0, Qt.AscendingOrder)
main_skel.res_TBL.sortByColumn(0, Qt.AscendingOrder)
main_skel.image_TBL.sortByColumn(0, Qt.AscendingOrder)
@pyqtSlot(name='setPlotWidgetVisible')
def setPlotWidgetVisibleSlot(hsgui, bit=None): #None = toggle
if hsgui.plotWidget != None:
            logdbg('Toggling Plot Widget')
            if bit is None: bit = not hsgui.plotWidget.isVisible()
            was_visible = hsgui.plotWidget.isVisible()
            hsgui.plotWidget.setVisible(bit)
if was_visible != bit:
if bit:
hsgui.main_skel.fignumSPIN.setValue(0)
else:
hsgui.main_skel.fignumSPIN.setValue(1)
#hsgui.setFignumSignal.emit(int(1 - bit)) # plotwidget fignum = 0
# Internal GUI Functions
def populate_tbl_helper(hsgui, tbl, col_headers, col_editable, row_list, row2_data_tup ):
#tbl = main_skel.chip_TBL
hheader = tbl.horizontalHeader()
sort_col = hheader.sortIndicatorSection()
sort_ord = hheader.sortIndicatorOrder()
tbl.sortByColumn(0, Qt.AscendingOrder) # Basic Sorting
prevBlockSignals = tbl.blockSignals(True)
tbl.clear()
tbl.setColumnCount(len(col_headers))
tbl.setRowCount(len(row_list))
tbl.verticalHeader().hide()
tbl.setHorizontalHeaderLabels(col_headers)
tbl.setSelectionMode( QAbstractItemView.SingleSelection )
tbl.setSelectionBehavior( QAbstractItemView.SelectRows)
tbl.setSortingEnabled(False)
for row in iter(row_list):
data_tup = row2_data_tup[row]
for col, data in enumerate(data_tup):
item = QTableWidgetItem()
try:
int_data = int(data)
item.setData(Qt.DisplayRole, int_data)
except ValueError: # for strings
item.setText(str(data))
except TypeError: #for lists
item.setText(str(data))
item.setTextAlignment(Qt.AlignHCenter)
if col_editable[col]: item.setFlags(item.flags() | Qt.ItemIsEditable)
else: item.setFlags(item.flags() ^ Qt.ItemIsEditable)
tbl.setItem(row, col, item)
tbl.setSortingEnabled(True)
tbl.sortByColumn(sort_col,sort_ord) # Move back to old sorting
tbl.show()
tbl.blockSignals(prevBlockSignals)
@pyqtSlot(dict, name='updateDBStatsSlot')
@gui_log
def updateDBStatsSlot(hsgui, stats):
hsgui.setWindowTitle(stats['title'])
def updateSelSpinsSlot(hsgui, cid, gid):
hsgui.prev_cid = cid
hsgui.prev_gid = gid
hsgui.main_skel.sel_cid_SPIN.setValue(cid)
hsgui.main_skel.sel_gid_SPIN.setValue(gid)
def redrawGuiSlot(hsgui):
hsgui.show()
if hsgui.plotWidget != None and\
hsgui.plotWidget.isVisible():
hsgui.plotWidget.show()
hsgui.plotWidget.draw()
def updateStateLabelSlot(hsgui, state):
hsgui.main_skel.state_LBL.setText(state)
@pyqtSlot(list, list, list, list, name='populateChipTblSlot')
def populateChipTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
hsgui.populate_tbl_helper(hsgui.main_skel.chip_TBL, col_headers, col_editable, row_list, row2_data_tup)
@pyqtSlot(list, list, list, list, name='populateImageTblSlot')
def populateImageTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
hsgui.populate_tbl_helper(hsgui.main_skel.image_TBL, col_headers, col_editable, row_list, row2_data_tup)
@pyqtSlot(list, list, list, list, name='populateResultTblSlot')
def populateResultTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
hsgui.populate_tbl_helper(hsgui.main_skel.res_TBL, col_headers, col_editable, row_list, row2_data_tup)
@gui_log
def chipTableChangedSlot(hsgui, item):
'A Chip had a data member changed '
hsgui.logdbgSignal.emit('chip table changed')
sel_row = item.row()
sel_col = item.column()
sel_cid = int(hsgui.main_skel.chip_TBL.item(sel_row,0).text())
new_val = str(item.text()).replace(',',';;')
header_lbl = str(hsgui.main_skel.chip_TBL.horizontalHeaderItem(sel_col).text())
hsgui.selectCidSignal.emit(sel_cid)
# Rename the chip!
if header_lbl == 'Chip Name':
hsgui.renameChipIdSignal.emit(new_val, sel_cid)
# Change the user property instead
else:
hsgui.changeChipPropSignal.emit(header_lbl, new_val, sel_cid)
@gui_log
def resultTableChangedSlot(hsgui, item):
'A Chip was Renamed in Result View'
hsgui.logdbgSignal.emit('result table changed')
sel_row = item.row()
sel_cid = int(hsgui.main_skel.res_TBL.item(sel_row,1).text())
new_name = str(item.text())
hsgui.renameChipIdSignal.emit(new_name, int(sel_cid))
def imageTableClickedSlot(hsgui, item):
'Select Image ID'
if item == hsgui.prev_tbl_item: return
hsgui.prev_tbl_item = item
sel_row = item.row()
sel_gid = int(hsgui.main_skel.image_TBL.item(sel_row,0).text())
hsgui.selectGidSignal.emit(sel_gid)
def chipTableClickedSlot(hsgui, item):
'Select Chip ID'
hsgui.logdbgSignal.emit('chip table clicked')
if item == hsgui.prev_tbl_item: return
hsgui.prev_tbl_item = item
sel_row = item.row()
sel_cid = int(hsgui.main_skel.chip_TBL.item(sel_row,0).text())
hsgui.selectCidSignal.emit(sel_cid)
def update_image_table(self):
uim.populateImageTblSignal.connect( uim.hsgui.populateImageTblSlot )
pass
def select_tab(uim, tabname, block_draw=False):
logdbg('Selecting the '+tabname+' Tab')
if block_draw:
prevBlock = uim.hsgui.main_skel.tablesTabWidget.blockSignals(True)
tab_index = uim.tab_order.index(tabname)
uim.selectTabSignal.emit(tab_index)
if block_draw:
uim.hsgui.main_skel.tablesTabWidget.blockSignals(prevBlock)
def get_gui_figure(uim):
'returns the matplotlib.pyplot.figure'
if uim.hsgui != None and uim.hsgui.plotWidget != None:
fig = uim.hsgui.plotWidget.figure
fig.show = lambda: uim.hsgui.plotWidget.show() #HACKY HACK HACK
return fig
return None
@func_log
def redraw_gui(uim):
if not uim.hsgui is None and uim.hsgui.isVisible():
uim.redrawGuiSignal.emit()
# --- UIManager things that deal with the GUI Through Signals
@func_log
def populate_chip_table(uim):
#tbl = uim.hsgui.main_skel.chip_TBL
cm = uim.hs.cm
col_headers = ['Chip ID', 'Chip Name', 'Name ID', 'Image ID', 'Other CIDS']
col_editable = [ False , True , False , False , False ]
# Add User Properties to headers
col_headers += cm.user_props.keys()
col_editable += [True for key in cm.user_props.keys()]
# Create Data List
cx_list = cm.get_valid_cxs()
data_list = [None]*len(cx_list)
row_list = range(len(cx_list))
for (i,cx) in enumerate(cx_list):
# Get Indexing Data
cid = cm.cx2_cid[cx]
gid = cm.cx2_gid(cx)
nid = cm.cx2_nid(cx)
# Get Useful Data
name = cm.cx2_name(cx)
other_cxs_ = setdiff1d(cm.cx2_other_cxs([cx])[0], cx)
other_cids = cm.cx2_cid[other_cxs_]
# Get User Data
cm.user_props.keys()
user_data = [cm.user_props[key][cx] for key in
cm.user_props.iterkeys()]
# Pack data to sent to Qt
data_list[i] = (cid, name, nid, gid, other_cids)+tuple(user_data)
#(cid, name, nid, gid, other_cids, *user_data)
uim.populateChipTblSignal.emit(col_headers, col_editable, row_list, data_list)
@func_log
def populate_image_table(uim):
col_headers = ['Image ID', 'Image Name', 'Chip IDs', 'Chip Names']
col_editable = [ False , False , False , False ]
# Populate table with valid image indexes
cm, gm = uim.hs.get_managers('cm','gm')
gx_list = gm.get_valid_gxs()
data_list = [None]*len(gx_list)
row_list = range(len(gx_list))
for (i,gx) in enumerate(gx_list):
gid = gm.gx2_gid[gx]
gname = gm.gx2_gname[gx]
cid_list = gm.gx2_cids(gx)
name_list = str([cm.cid2_(cid, 'name') for cid in cid_list])
data_list[i] = (gid, gname, cid_list, name_list)
uim.populateImageTblSignal.emit(col_headers, col_editable, row_list, data_list)
@func_log
def populate_result_table(uim):
col_headers = ['Rank', 'Chip ID', 'Chip Name', 'score']
col_editable = [False , False , True , False ]
# Check to see if results exist
res = uim.sel_res
if res is None:
logdbg('Not populating. selected results are None.')
return None
logmsg(res)
gm, cm, am = uim.hs.get_managers('gm','cm','am')
dynargs =\
('cid', 'name' )
(qcid , qname ) = res.qcid2_(*dynargs)
(tcid , tname , tscore ) = res.tcid2_(*dynargs+('score',))
num_results = len(tcid)
data_list = [None]*(num_results+1)
row_list = range(num_results+1)
data_list[0] = [0, qcid, qname, 'Queried Chip']
for (ix, (cid, name, score)) in enumerate(zip(tcid, tname, tscore)):
rank = ix+1
data_list[ix+1] = (rank, cid, name, score)
uim.populateResultTblSignal.emit(col_headers, col_editable, row_list, data_list)
def populate_algo_settings(uim):
logdbg('Populating the Preference Tree... Sending Signal')
uim.populatePrefTreeSignal.emit(uim.hs.all_pref)
def set_fignum(uim, fignum):
if uim.hsgui != None:
prevBlockSignals = uim.hsgui.main_skel.fignumSPIN.blockSignals(True)
uim.setfignumSignal.emit(fignum)
uim.hsgui.main_skel.fignumSPIN.blockSignals(prevBlockSignals)
if __name__ == '__main__':
import sys
multiprocessing.freeze_support()
def test():
app = QtGui.QApplication(sys.argv)
main_win = HotSpotterMainWindow()
app.setActiveWindow(main_win)
sys.exit(app.exec_())
test()
| apache-2.0 | 2,173,761,121,248,932,400 | 41.581509 | 112 | 0.616708 | false | 3.337974 | false | false | false |
stackdump/txbitwrap | txbitwrap/storage/postgres.py | 1 | 5479 | from string import Template
from twisted.internet.defer import inlineCallbacks
from twisted.enterprise import adbapi
ProgrammingError = Exception # FIXME
TOKEN_MAX = 65536
def connect(**kwargs):
""" create new connection pool """
dbpool = adbapi.ConnectionPool(
"psycopg2",
cp_min=3,
cp_max=10,
cp_noisy=True,
cp_reconnect=True,
user=kwargs['pg-username'],
password=kwargs['pg-password'],
host=kwargs['pg-host'],
database=kwargs['pg-database']
)
return dbpool
def drop_schema(schema, **kwargs):
"" ""
if 'conn' in kwargs:
conn = kwargs.pop('conn')
else:
conn = connect(**kwargs)
sql = "DROP SCHEMA IF EXISTS %s CASCADE" % schema
return conn.runOperation(sql)
@inlineCallbacks
def create_schema(machine, **kwargs):
""" add a new schema to an existing db """
if 'conn' in kwargs:
conn = kwargs.pop('conn')
else:
conn = connect(**kwargs)
schema = kwargs.get('schema_name', machine.name)
yield conn.runOperation("CREATE schema %s" % schema)
yield conn.runOperation("""
CREATE DOMAIN %s.token as smallint CHECK(VALUE >= 0 and VALUE <= %i)
""" % (schema, TOKEN_MAX))
num_places = len(machine.machine['state'])
columns = [''] * num_places
vector = [''] * num_places
delta = [''] * num_places
for key, props in machine.net.places.items():
i = props['offset']
columns[i] = ' %s %s.token' % (key, schema)
vector[i] = ' %s int4' % key
delta[i] = " (state).%s + conn.%s" % (key, key)
yield conn.runOperation("""
CREATE TYPE %s.state as ( %s )
""" % (schema, ','.join(columns)))
yield conn.runOperation("""
CREATE TYPE %s.vector as ( %s )
""" % (schema, ','.join(vector)))
yield conn.runOperation("""
CREATE TYPE %s.event as (
id varchar(32),
oid varchar(255),
rev int4
)
""" % (schema))
yield conn.runOperation("""
CREATE TYPE %s.event_payload as (
id varchar(32),
oid varchar(255),
seq int4,
action varchar(255),
payload json,
timestamp timestamp
)
""" % (schema))
yield conn.runOperation("""
CREATE TYPE %s.current_state as (
id varchar(32),
oid varchar(255),
action varchar(255),
rev int4,
state %s.state,
payload json,
modified timestamp,
created timestamp
)
""" % (schema, schema))
initial_vector = machine.net.initial_vector()
# KLUDGE: this seems to be a limitation of how default values are declared
# this doesn't work when state vector has only one element
# state %s.state DEFAULT (0), # FAILS
# state %s.state DEFAULT (0,0), # WORKS
if len(initial_vector) < 2:
raise Exception('state vector must be an n-tuple where n >= 2')
yield conn.runOperation("""
CREATE TABLE %s.states (
oid VARCHAR(256) PRIMARY KEY,
rev int4 default 0,
state %s.state DEFAULT %s::%s.state,
created timestamp DEFAULT now(),
modified timestamp DEFAULT now()
);
""" % (schema, schema, tuple(initial_vector), schema))
yield conn.runOperation("""
CREATE TABLE %s.transitions (
action VARCHAR(255) PRIMARY KEY,
vector %s.vector
);
""" % (schema, schema))
for key, props in machine.net.transitions.items():
yield conn.runOperation("""
INSERT INTO %s.transitions values('%s', %s)
""" % (schema, key, tuple(props['delta'])))
yield conn.runOperation("""
CREATE TABLE %s.events (
oid VARCHAR(255) REFERENCES %s.states(oid) ON DELETE CASCADE ON UPDATE CASCADE,
seq SERIAL,
action VARCHAR(255) NOT NULL,
payload jsonb DEFAULT '{}',
hash VARCHAR(32) NOT NULL,
timestamp timestamp DEFAULT NULL
);
""" % (schema, schema))
yield conn.runOperation("""
ALTER TABLE %s.events ADD CONSTRAINT %s_oid_seq_pkey PRIMARY KEY (oid, seq);
""" % (schema, schema))
yield conn.runOperation("""
CREATE INDEX %s_hash_idx on %s.events (hash);
""" % (schema, schema))
function_template = Template("""
CREATE OR REPLACE FUNCTION ${name}.vclock() RETURNS TRIGGER
AS $MARKER
DECLARE
conn ${name}.vector;
revision int4;
BEGIN
SELECT
(vector).* INTO STRICT conn
FROM
${name}.transitions
WHERE
action = NEW.action;
UPDATE
${name}.states set
state = ( ${delta} ),
rev = rev + 1,
modified = now()
WHERE
oid = NEW.oid
RETURNING
rev into STRICT revision;
NEW.seq = revision;
NEW.hash = md5(row_to_json(NEW)::TEXT);
NEW.timestamp = now();
RETURN NEW;
END
$MARKER LANGUAGE plpgsql""")
fn_sql = function_template.substitute(
MARKER='$$',
name=schema,
delta=','.join(delta)
)
yield conn.runOperation(fn_sql)
function_template = Template("""
CREATE TRIGGER ${name}_dispatch
BEFORE INSERT on ${name}.events
FOR EACH ROW EXECUTE PROCEDURE ${name}.vclock();
""")
trigger_sql = function_template.substitute(name=schema)
yield conn.runOperation(trigger_sql)
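
# --- usage sketch (illustrative, not part of the original module) ---
# Recreate a machine's schema from scratch.  ``machine`` is assumed to be
# the same txbitwrap state-machine object that create_schema() expects,
# and ``pg_opts`` the pg-* connection keys consumed by connect().
@inlineCallbacks
def _example_bootstrap(machine, **pg_opts):
    conn = connect(**pg_opts)
    yield drop_schema(machine.name, conn=conn)
    yield create_schema(machine, conn=conn)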
| mit | -5,308,692,846,363,749,000 | 25.857843 | 85 | 0.566892 | false | 3.938893 | false | false | false |
edx/edx-val | edxval/transcript_utils.py | 1 | 3768 | """
A module containing transcripts utils.
"""
# pylint: disable=inconsistent-return-statements
import json
from pysrt import SubRipFile, SubRipItem, SubRipTime
from pysrt.srtexc import Error
from edxval.exceptions import TranscriptsGenerationException
class Transcript:
"""
Container for transcript methods.
"""
SRT = 'srt'
SJSON = 'sjson'
@staticmethod
def generate_sjson_from_srt(srt_subs):
"""
        Generate "sjson" transcripts from SubRip (*.srt) subs.
Arguments:
srt_subs(SubRip): "SRT" subs object
Returns:
Subs converted to "SJSON" format.
"""
sub_starts = []
sub_ends = []
sub_texts = []
for sub in srt_subs:
sub_starts.append(sub.start.ordinal)
sub_ends.append(sub.end.ordinal)
sub_texts.append(sub.text.replace('\n', ' '))
sjson_subs = {
'start': sub_starts,
'end': sub_ends,
'text': sub_texts
}
return sjson_subs
@staticmethod
def generate_srt_from_sjson(sjson_subs):
"""
Generate transcripts from sjson to SubRip (*.srt)
Arguments:
sjson_subs (dict): `sjson` subs.
Returns:
Subtitles in SRT format.
"""
output = ''
equal_len = len(sjson_subs['start']) == len(sjson_subs['end']) == len(sjson_subs['text'])
if not equal_len:
return output
for i in range(len(sjson_subs['start'])):
item = SubRipItem(
index=i,
start=SubRipTime(milliseconds=sjson_subs['start'][i]),
end=SubRipTime(milliseconds=sjson_subs['end'][i]),
text=sjson_subs['text'][i]
)
            output += str(item)
output += '\n'
return output
@classmethod
def convert(cls, content, input_format, output_format):
"""
Convert transcript `content` from `input_format` to `output_format`.
Arguments:
content: Transcript content byte-stream.
input_format: Input transcript format.
output_format: Output transcript format.
Accepted input formats: sjson, srt.
Accepted output format: srt, sjson.
Raises:
TranscriptsGenerationException: On parsing the invalid srt
content during conversion from srt to sjson.
"""
assert input_format in ('srt', 'sjson')
assert output_format in ('srt', 'sjson')
# Decode the content with utf-8-sig which will also
# skip byte order mark(BOM) character if found.
try:
content = content.decode('utf-8-sig')
except UnicodeDecodeError:
# Most of our stuff is UTF-8, but don't break if Latin-1 encoded
# transcripts are still floating around in older courses.
content = content.decode('latin-1')
if input_format == output_format:
return content
if input_format == 'srt':
if output_format == 'sjson':
try:
# With error handling (set to 'ERROR_RAISE'), we will be getting
# the exception if something went wrong in parsing the transcript.
srt_subs = SubRipFile.from_string(content, error_handling=SubRipFile.ERROR_RAISE)
except Error as ex: # Base exception from pysrt
raise TranscriptsGenerationException(str(ex)) from ex
return json.dumps(cls.generate_sjson_from_srt(srt_subs))
if input_format == 'sjson':
if output_format == 'srt':
return cls.generate_srt_from_sjson(json.loads(content))
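
# --- usage sketch (illustrative, not part of the original module) ---
# Round-trip a minimal, made-up single-cue SRT payload through both
# supported formats via Transcript.convert().
def _example_round_trip():
    srt_bytes = b"1\n00:00:00,000 --> 00:00:01,000\nHello world\n\n"
    sjson_str = Transcript.convert(srt_bytes, 'srt', 'sjson')
    return Transcript.convert(sjson_str.encode('utf-8'), 'sjson', 'srt')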
| agpl-3.0 | -1,132,600,385,978,478,500 | 29.885246 | 101 | 0.566348 | false | 4.131579 | false | false | false |
bgribble/mfp | mfp/builtins/biquad.py | 1 | 4401 | #! /usr/bin/env python
'''
biquad.py: Biquad filter implementation
Copyright (c) 2012 Bill Gribble <[email protected]>
'''
from mfp.processor import Processor
from ..mfp_app import MFPApp
from mfp import log
import math
from ..bang import Uninit
class Biquad(Processor):
doc_tooltip_obj = "Biquad filter (5-parameter normalized form)"
doc_tooltip_inlet = [ "Signal in or parameter dictionary with keys a1, a2, b0, b1, b2" ]
doc_tooltip_outlet = [ "Signal out" ]
def __init__(self, init_type, init_args, patch, scope, name):
Processor.__init__(self, 1, 1, init_type, init_args, patch, scope, name)
initargs, kwargs = self.parse_args(init_args)
self.dsp_inlets = [0]
self.dsp_outlets = [0]
self.dsp_init("biquad~")
def trigger(self):
if isinstance(self.inlets[0], dict):
for param, val in self.inlets[0].items():
try:
self.dsp_setparam(param, float(val))
except Exception as e:
import traceback
tb = traceback.format_exc()
log.debug("biquad~: Error setting param", param, "to", type(val), str(val))
log.debug("biquad~: Exception:", str(e))
self.error(tb)
def bq_hipass(freq, q):
params = {}
w0 = 2 * math.pi * freq / MFPApp().samplerate
alpha = math.sin(w0) / (2*q)
a0 = 1 + alpha
params['a1'] = (-2.0*math.cos(w0)) / a0
params['a2'] = (1 - alpha) / a0
params['b0'] = (1 + math.cos(w0)) / (2.0 * a0)
params['b1'] = -1.0*(1 + math.cos(w0)) / a0
params['b2'] = (1 + math.cos(w0)) / (2.0 * a0)
return params
def bq_lopass(freq, q):
params = {}
w0 = 2 * math.pi * freq / MFPApp().samplerate
alpha = math.sin(w0) / (2*q)
a0 = 1 + alpha
params['a1'] = (-2.0*math.cos(w0)) / a0
params['a2'] = (1 - alpha) / a0
params['b0'] = (1 - math.cos(w0)) / (2.0 * a0)
params['b1'] = (1 - math.cos(w0)) / a0
params['b2'] = (1 - math.cos(w0)) / (2.0 * a0)
return params
def bq_bandpass(freq, q):
params = {}
w0 = 2 * math.pi * freq / MFPApp().samplerate
alpha = math.sin(w0) / (2*q)
a0 = 1 + alpha
params['a1'] = (-2.0*math.cos(w0)) / a0
params['a2'] = (1 - alpha) / a0
params['b0'] = alpha / a0
params['b1'] = 0
params['b2'] = -1.0 * alpha / a0
return params
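
# --- illustrative note (not part of the original module) ---
# The bq_* helpers above all return the five normalized coefficients that
# the biquad~ DSP object consumes.  For example, once the app's sample
# rate is configured (the values shown are placeholders, not outputs):
#
#     params = bq_lopass(1000.0, 0.707)   # 1 kHz lowpass, Butterworth-like Q
#     # params -> {'a1': ..., 'a2': ..., 'b0': ..., 'b1': ..., 'b2': ...}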
class BiquadWrapper(Processor):
doc_tooltip_obj = "%s filter (biquad implementation)"
doc_tooltip_inlet = ["Signal in",
"Frequency of interest (default: initarg 0)",
"Q (filter steepness) (default: initarg 1)"]
doc_tooltip_outlet = ["Signal out"]
def __init__(self, bq_func, init_type, init_args, patch, scope, name):
Processor.__init__(self, 3, 1, init_type, init_args, patch, scope, name)
initargs, kwargs = self.parse_args(init_args)
if len(initargs) > 0:
self.freq = initargs[0]
else:
self.freq = 0
if len(initargs) > 1:
self.q = initargs[1]
else:
self.q = 0.707
self.biquad_thunk = bq_func
self.biquad_params = self.biquad_thunk(self.freq, self.q)
self.hot_inlets = [0, 1, 2]
self.dsp_inlets = [0]
self.dsp_outlets = [0]
self.dsp_init("biquad~", **self.biquad_params)
def trigger(self):
recalc = False
if self.inlets[1] is not Uninit:
self.freq = self.inlets[1]
recalc = True
if self.inlets[2] is not Uninit:
self.q = self.inlets[2]
recalc = True
if recalc:
self.biquad_params = self.biquad_thunk(self.freq, self.q)
for n, v in self.biquad_params.items():
self.dsp_setparam(n, float(v))
def mk_biquad(thunk, filter_name):
def factory(init_type, init_args, patch, scope, name):
bq = BiquadWrapper(thunk, init_type, init_args, patch, scope, name)
bq.doc_tooltip_obj = BiquadWrapper.doc_tooltip_obj % filter_name
return bq
return factory
def register():
MFPApp().register("biquad~", Biquad)
MFPApp().register("hip~", mk_biquad(bq_hipass, "Highpass"))
MFPApp().register("lop~", mk_biquad(bq_lopass, "Lowpass"))
MFPApp().register("bp~", mk_biquad(bq_bandpass, "Bandpass"))
| gpl-2.0 | -6,339,842,470,302,742,000 | 32.59542 | 95 | 0.550557 | false | 3.008202 | false | false | false |
haup/totoro | totoro/app/main/errors.py | 1 | 1506 | from flask import render_template, request, jsonify
from . import main
@main.app_errorhandler(403)
def forbidden(e):
""" This function is called if there was a 403 error exception
and displays the related error page
Return: template of 403 error page
"""
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'forbidden'})
response.status_code = 403
return response
return render_template('403.html'), 403
@main.app_errorhandler(404)
def page_not_found(e):
""" This function is called if there was a 404 error exception
and displays the related error page
Return: template of 404 error page
"""
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'not found'})
response.status_code = 404
return response
return render_template('404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(e):
""" This function is called if there was a 404 error exception
and displays the related error page
Return: template of 404 error page
"""
if request.accept_mimetypes.accept_json and \
not request.accept_mimetypes.accept_html:
response = jsonify({'error': 'internal server error'})
response.status_code = 500
return response
return render_template('500.html'), 500
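
# --- illustrative note (not part of the original module) ---
# All three handlers negotiate the response format: a client that accepts
# only JSON gets a JSON error body, e.g. (hypothetical host and path):
#
#     curl -H "Accept: application/json" http://localhost:5000/missing
#     # -> {"error": "not found"} with HTTP status 404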
| gpl-3.0 | 3,314,742,504,159,151,600 | 31.73913 | 66 | 0.664011 | false | 4.327586 | false | false | false |
zencoders/pyircbot | plugins/karma_rate.py | 1 | 1474 | import time
from collections import defaultdict
class KarmaRateLimiter(object):
def __init__(self, timeout=60, penalty=3):
"""timeout in seconds - default 1 min"""
self.timeout = timeout
self.penalty = penalty
# http://goo.gl/ZFmFX
# http://stackoverflow.com/a/5900628
        # maps nick -> [timestamp of last request, violation count]
        self.user_last_request = defaultdict(lambda: [0, 0])
def rate_limit(self, nick):
"""Return 0 if not rate_limited, 1 if has penalization, 2 otherwise"""
now = int(time.time())
if nick not in self.user_last_request:
self.user_last_request[nick] = [now,0]
return 0
elif (now - self.user_last_request[nick][0]) < self.timeout:
# Timeout not expired, so increase the counter
self.user_last_request[nick][1] += 1
# User is rate limited
if self.user_last_request[nick][1] % self.penalty == 0:
# give him the penalization!
return 1
else:
return 2
else:
# > timeout OK
self.user_last_request[nick] = [now, 0]
return 0
def user_timeout(self, nick):
"""Return the user specific timeout"""
if nick not in self.user_last_request:
return 0
else:
wait_time = self.timeout - (int(time.time()) - self.user_last_request[nick][0])
return wait_time
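
# --- usage sketch (illustrative, not part of the original plugin) ---
def _example_usage():
    limiter = KarmaRateLimiter(timeout=60, penalty=3)
    first = limiter.rate_limit('alice')        # 0 -> first request, allowed
    second = limiter.rate_limit('alice')       # 2 -> inside timeout, throttled
    remaining = limiter.user_timeout('alice')  # seconds left in the window
    return first, second, remaining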
| gpl-2.0 | -5,184,493,849,096,102,000 | 34.95122 | 91 | 0.559701 | false | 4.027322 | false | false | false |
arabenjamin/pybrain | pybrain/rl/environments/task.py | 26 | 3088 | __author__ = 'Tom Schaul, [email protected]'
from scipy import clip, asarray
from pybrain.utilities import abstractMethod
class Task(object):
""" A task is associating a purpose with an environment. It decides how to evaluate the
observations, potentially returning reinforcement rewards or fitness values.
Furthermore it is a filter for what should be visible to the agent.
Also, it can potentially act as a filter on how actions are transmitted to the environment. """
def __init__(self, environment):
""" All tasks are coupled to an environment. """
self.env = environment
# limits for scaling of sensors and actors (None=disabled)
self.sensor_limits = None
self.actor_limits = None
self.clipping = True
def setScaling(self, sensor_limits, actor_limits):
""" Expects scaling lists of 2-tuples - e.g. [(-3.14, 3.14), (0, 1), (-0.001, 0.001)] -
one tuple per parameter, giving min and max for that parameter. The functions
normalize and denormalize scale the parameters between -1 and 1 and vice versa.
To disable this feature, use 'None'. """
self.sensor_limits = sensor_limits
self.actor_limits = actor_limits
def performAction(self, action):
""" A filtered mapping towards performAction of the underlying environment. """
if self.actor_limits:
action = self.denormalize(action)
self.env.performAction(action)
def getObservation(self):
""" A filtered mapping to getSensors of the underlying environment. """
sensors = self.env.getSensors()
if self.sensor_limits:
sensors = self.normalize(sensors)
return sensors
def getReward(self):
""" Compute and return the current reward (i.e. corresponding to the last action performed) """
return abstractMethod()
def normalize(self, sensors):
""" The function scales the parameters to be between -1 and 1. e.g. [(-pi, pi), (0, 1), (-0.001, 0.001)] """
assert(len(self.sensor_limits) == len(sensors))
result = []
for l, s in zip(self.sensor_limits, sensors):
if not l:
result.append(s)
else:
result.append((s - l[0]) / (l[1] - l[0]) * 2 - 1.0)
if self.clipping:
            result = clip(result, -1, 1)
return asarray(result)
def denormalize(self, actors):
""" The function scales the parameters from -1 and 1 to the given interval (min, max) for each actor. """
assert(len(self.actor_limits) == len(actors))
result = []
for l, a in zip(self.actor_limits, actors):
if not l:
result.append(a)
else:
r = (a + 1.0) / 2 * (l[1] - l[0]) + l[0]
if self.clipping:
r = clip(r, l[0], l[1])
result.append(r)
return result
@property
def indim(self):
return self.env.indim
@property
def outdim(self):
return self.env.outdim
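
# --- usage sketch (illustrative, not part of the original module) ---
# Task is abstract (getReward() must be overridden); a minimal subclass:
#
#     class NullRewardTask(Task):
#         def getReward(self):
#             return 0.0
#
#     task = NullRewardTask(env)  # env: any pybrain Environment instance
#     task.setScaling([(-3.14, 3.14), (0.0, 1.0)], None)
#     obs = task.getObservation()  # sensors scaled into [-1, 1]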
| bsd-3-clause | 2,163,094,764,573,265,000 | 36.204819 | 116 | 0.595531 | false | 3.969152 | false | false | false |
gannetson/sportschooldeopenlucht | apps/fund/migrations/0003_remove_m2m_payments_on_order.py | 1 | 15520 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing M2M table for field payments on 'Order'
db.delete_table(db.shorten_name(u'fund_order_payments'))
def backwards(self, orm):
# Adding M2M table for field payments on 'Order'
m2m_table_name = db.shorten_name(u'fund_order_payments')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('order', models.ForeignKey(orm[u'fund.order'], null=False)),
('payment', models.ForeignKey(orm[u'cowry.payment'], null=False))
))
db.create_unique(m2m_table_name, ['order_id', 'payment_id'])
models = {
u'accounts.bluebottleuser': {
'Meta': {'object_name': 'BlueBottleUser'},
'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'fund.customvoucherrequest': {
'Meta': {'object_name': 'CustomVoucherRequest'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True'}),
'contact_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'contact_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'organization': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'}),
'value': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
},
u'fund.donation': {
'Meta': {'object_name': 'Donation'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'donation_type': ('django.db.models.fields.CharField', [], {'default': "'one_off'", 'max_length': '20', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.order': {
'Meta': {'object_name': 'Order'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recurring': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'current'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fund.Order']"})
},
u'fund.recurringdirectdebitpayment': {
'Meta': {'object_name': 'RecurringDirectDebitPayment'},
'account': ('apps.fund.fields.DutchBankAccountField', [], {'max_length': '10'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounts.BlueBottleUser']", 'unique': 'True'})
},
u'fund.voucher': {
'Meta': {'object_name': 'Voucher'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'donations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['fund.Donation']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '2'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'receiver_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'receiver_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sender'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'sender_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'sender_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'projects.partnerorganization': {
'Meta': {'object_name': 'PartnerOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'projects.project': {
'Meta': {'ordering': "['title']", 'object_name': 'Project'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'popularity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['fund'] | bsd-3-clause | 4,318,038,385,452,015,600 | 81.558511 | 187 | 0.556637 | false | 3.640629 | false | false | false |
casebor/VisMol | VisMol3/visual/vismol_shaders.py | 1 | 25248 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# vismol_shaders.py
#
# Copyright 2016 Labio <labio@labio-XPS-8300>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
my_glLigth = """
struct gl_LightSourceParameters {
vec4 ambient; // Aclarri
vec4 diffuse; // Dcli
vec4 specular; // Scli
vec4 position; // Ppli
vec4 halfVector; // Derived: Hi
vec3 spotDirection; // Sdli
float spotExponent; // Srli
float spotCutoff; // Crli
// (range: [0.0,90.0], 180.0)
float spotCosCutoff; // Derived: cos(Crli)
// (range: [1.0,0.0],-1.0)
float constantAttenuation; // K0
float linearAttenuation; // K1
float quadraticAttenuation;// K2
};
uniform gl_LightSourceParameters gl_LightSource[gl_MaxLights];
"""
my_glMaterial = """
struct gl_MaterialParameters {
vec4 emission; // Ecm
vec4 ambient; // Acm
vec4 diffuse; // Dcm
vec4 specular; // Scm
float shininess; // Srm
};
uniform gl_MaterialParameters gl_FrontMaterial;
uniform gl_MaterialParameters gl_BackMaterial;
"""
vertex_shader = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 sh_color;
void main()
{
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
sh_color = vert_color;
}
"""
fragment_shader = """
#version 330
in vec3 sh_color;
out vec4 final_color;
void main()
{
final_color = vec4(sh_color, 1.0);
}
"""
geometry_shader = """
#version 330
in Coords {
vec4 my_cords;
vec3 my_col;
} corners[];
out vec3 sh_color;
void main(){
gl_Position = corners[0].my_cords;
sh_color = corners[0].my_col;
}
"""
vertex_shader2 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 frag_vert;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
frag_vert = vec3(view_mat * model_mat * vec4(vert_coord, 1.0));
frag_color = vert_color;
frag_normal = frag_vert;
}
"""
fragment_shader2 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_vert;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_vert);
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * my_light.color * frag_color;
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
vec3 incidence_vec = -vert_to_light;
vec3 reflection_vec = reflect(incidence_vec, normal);
vec3 vert_to_cam = normalize(cam_pos - frag_vert);
float cos_angle = max(0.0, dot(vert_to_cam, reflection_vec));
float specular_coef = pow(cos_angle, my_light.shininess);
vec3 specular = specular_coef * my_light.specular_color * my_light.intensity;
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader3 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_coord = vec3(model_mat * vec4(coordinate, 1.0));
frag_normal = coordinate;
frag_color = vert_color;
}
"""
fragment_shader3 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_coord);
vec3 vert_to_cam = normalize(cam_pos - frag_coord);
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * vec3(1) * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader4 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(coordinate, 1.0);
frag_coord = -vec3(modelview * vec4(coordinate, 1.0));
frag_normal = normalize(normal_mat * (coordinate - center));
frag_color = vert_color;
}
"""
fragment_shader4 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
//vec3 normal = normalize(frag_normal);
//vec3 eye = normalize(frag_coord);
//
//vec3 vert_to_light = normalize(vec3(view_mat*vec4(my_light.position, 0.0)));
////vec3 vert_to_cam = normalize(frag_coord);
//
//vec3 spec = vec3(0.0);
//float intensity = max(dot(normal, vert_to_light), 0.0);
//if (intensity>0.0){
// vec3 h = normalize(vert_to_light + eye);
// float int_spec = max(dot(h, normal), 0.0);
// spec = my_light.intensity * pow(int_spec, my_light.shininess);
//}
//vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
//float diffuse_coef = max(0.0, dot(normal, vert_to_light));
//vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
//final_color = vec4(intensity * diffuse + spec + ambient, 1.0);
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_sphere = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(vert_coord, 1.0);
frag_coord = -vec3(modelview * vec4(vert_coord, 1.0));
    frag_normal = normalize(normal_mat * (vert_coord - vert_center));
frag_color = vert_color;
}
"""
fragment_shader_sphere = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_crystal = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_normal;
out vec3 frag_color;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(vert_coord, 1.0);
frag_coord = -vec3(modelview * vec4(vert_coord, 1.0));
frag_normal = normalize(normal_mat * (vert_coord - vert_center));
frag_color = vert_color;
}
"""
fragment_shader_crystal = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
final_color = vec4(ambient + diffuse, 0.6);
}
"""
vertex_shader_dot_surface = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 frag_color;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
frag_color = vert_color;
}
"""
fragment_shader_dot_surface = """
#version 330
in vec3 frag_color;
out vec4 final_color;
void main(){
final_color = vec4(frag_color, 1.0);
}
"""
vertex_shader_directional_light = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(coordinate, 1.0);
frag_coord = -vec3(modelview * vec4(coordinate, 1.0));
frag_normal = normalize(normal_mat * (coordinate - center));
frag_color = vert_color;
}
"""
fragment_shader_directional_light = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_point_light = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_coord = vec3(model_mat * vec4(coordinate, 1.0));
frag_normal = coordinate;
frag_color = vert_color;
}
"""
fragment_shader_point_light = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_coord);
vec3 vert_to_cam = normalize(cam_pos - frag_coord);
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * vec3(1) * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_dots = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform float vert_ext_linewidth;
uniform float vert_int_antialias;
uniform float vert_dot_factor;
in vec3 vert_coord;
in vec3 vert_color;
in float vert_dot_size;
in vec4 bckgrnd_color;
out float frag_dot_size;
out float frag_ext_linewidth;
out float frag_int_antialias;
out vec4 frag_dot_color;
out vec4 frag_bckgrnd_color;
void main(){
frag_dot_size = vert_dot_size * vert_dot_factor;
frag_ext_linewidth = vert_ext_linewidth;
frag_int_antialias = vert_int_antialias;
frag_dot_color = vec4(vert_color, 1.0);
frag_bckgrnd_color = bckgrnd_color;
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1);
gl_PointSize = vert_dot_size + 2*(vert_ext_linewidth + 1.5*vert_int_antialias);
}
"""
fragment_shader_dots = """
#version 330
out vec4 final_color;
// ------------------------------------
in vec4 frag_bckgrnd_color;
in vec4 frag_dot_color;
in float frag_dot_size;
in float frag_ext_linewidth;
in float frag_int_antialias;
// ------------------------------------
float disc(vec2 P, float size)
{
float r = length((P.xy - vec2(0.5,0.5))*size);
r -= frag_dot_size/2;
return r;
}
// ----------------
float arrow_right(vec2 P, float size)
{
float r1 = abs(P.x -.50)*size + abs(P.y -.5)*size - frag_dot_size/2;
float r2 = abs(P.x -.25)*size + abs(P.y -.5)*size - frag_dot_size/2;
float r = max(r1,-r2);
return r;
}
// ----------------
float ring(vec2 P, float size)
{
float r1 = length((gl_PointCoord.xy - vec2(0.5,0.5))*size) - frag_dot_size/2;
float r2 = length((gl_PointCoord.xy - vec2(0.5,0.5))*size) - frag_dot_size/4;
float r = max(r1,-r2);
return r;
}
// ----------------
float clober(vec2 P, float size)
{
const float PI = 3.14159265358979323846264;
const float t1 = -PI/2;
const vec2 c1 = 0.2*vec2(cos(t1),sin(t1));
const float t2 = t1+2*PI/3;
const vec2 c2 = 0.2*vec2(cos(t2),sin(t2));
const float t3 = t2+2*PI/3;
const vec2 c3 = 0.2*vec2(cos(t3),sin(t3));
float r1 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c1)*size);
r1 -= frag_dot_size/3;
float r2 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c2)*size);
r2 -= frag_dot_size/3;
float r3 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c3)*size);
r3 -= frag_dot_size/3;
float r = min(min(r1,r2),r3);
return r;
}
// ----------------
float square(vec2 P, float size)
{
float r = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
r -= frag_dot_size/2;
return r;
}
// ----------------
float diamond(vec2 P, float size)
{
float r = abs(gl_PointCoord.x -.5)*size + abs(gl_PointCoord.y -.5)*size;
r -= frag_dot_size/2;
return r;
}
// ----------------
float vbar(vec2 P, float size)
{
float r1 = max(abs(gl_PointCoord.x -.75)*size,
abs(gl_PointCoord.x -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(r1,r3);
r -= frag_dot_size/2;
return r;
}
// ----------------
float hbar(vec2 P, float size)
{
float r2 = max(abs(gl_PointCoord.y -.75)*size,
abs(gl_PointCoord.y -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(r2,r3);
r -= frag_dot_size/2;
return r;
}
// ----------------
float cross(vec2 P, float size)
{
float r1 = max(abs(gl_PointCoord.x -.75)*size,
abs(gl_PointCoord.x -.25)*size);
float r2 = max(abs(gl_PointCoord.y -.75)*size,
abs(gl_PointCoord.y -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(min(r1,r2),r3);
r -= frag_dot_size/2;
return r;
}
void main(){
float size = frag_dot_size +2*(frag_ext_linewidth + 1.5*frag_int_antialias);
float t = frag_ext_linewidth/2.0-frag_int_antialias;
// gl_PointCoord is the pixel in the coordinate
float r = disc(gl_PointCoord, size);
float d = abs(r) - t;
// This if else statement makes the circle ilusion
if( r > (frag_ext_linewidth/2.0+frag_int_antialias)){
discard;
}
else if( d < 0.0 ){
final_color = frag_bckgrnd_color;
}
else{
float alpha = d/frag_int_antialias;
alpha = exp(-alpha*alpha);
if (r > 0)
final_color = frag_bckgrnd_color;
else
final_color = mix(frag_dot_color, frag_bckgrnd_color, alpha);
}
}
"""
vertex_shader_lines = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec4 frag_color;
out vec4 view_space;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_color = vec4(vert_color, 1.0);
view_space = view_mat * model_mat * vec4(coordinate, 1.0);
}
"""
fragment_shader_lines = """
#version 330
uniform vec4 fog_color;
uniform float fog_start;
uniform float fog_end;
in vec4 frag_color;
in vec4 view_space;
out vec4 final_color;
void main(){
float dist = abs(view_space.z);
if(dist>=fog_start){
float fog_factor = (fog_end-dist)/(fog_end-fog_start);
final_color = mix(fog_color, frag_color, fog_factor);
}
else{
final_color = frag_color;
}
}
"""
vertex_shader_sphere = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(vert_coord, 1.0);
frag_coord = -vec3(modelview * vec4(vert_coord, 1.0));
    frag_normal = normalize(normal_mat * (vert_coord - vert_center));
frag_color = vert_color;
}
"""
fragment_shader_sphere = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
v_s_glumpy = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord; // attribute vec3 position;
in vec3 vert_color; // attribute vec3 color;
in float vert_dot_size; // attribute float radius;
//const float vert_dot_size = 0.5; // attribute float radius;
out vec3 frag_color; // varying vec3 v_color;
out float f_radius; // varying float v_radius;
out float f_size; // varying float v_size;
out vec4 frag_coord; // varying vec4 v_eye_position;
out vec3 v_light_direction;
void main (void)
{
frag_color = vert_color;
f_radius = vert_dot_size;
frag_coord = view_mat * model_mat * vec4(vert_coord, 1.0);
v_light_direction = normalize(vec3(0,0,2));
gl_Position = projection_mat * frag_coord;
vec4 p = projection_mat * vec4(vert_dot_size, vert_dot_size, frag_coord.z, frag_coord.w);
f_size = 512.0 * p.x / p.w;
gl_PointSize = f_size + 5.0;
}
"""
f_s_glumpy = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
vec4 outline(float distance, float linewidth, float antialias, vec4 fg_color, vec4 bg_color){
vec4 frag_color;
float t = linewidth/2.0 - antialias;
float signed_distance = distance;
float border_distance = abs(signed_distance) - t;
float alpha = border_distance/antialias;
alpha = exp(-alpha*alpha);
if( border_distance < 0.0 )
frag_color = fg_color;
else if( signed_distance < 0.0 )
frag_color = mix(bg_color, fg_color, sqrt(alpha));
else {
if( abs(signed_distance) < (linewidth/2.0 + antialias) ) {
frag_color = vec4(fg_color.rgb, fg_color.a * alpha);
} else {
discard;
}
}
return frag_color;
}
in vec3 frag_color; // varying vec3 v_color;
in float f_radius; // varying float v_radius;
in float f_size; // varying float v_size;
in vec4 frag_coord; // varying vec4 v_eye_position;
in vec3 v_light_direction;
out vec4 final_color;
void main()
{
vec2 P = gl_PointCoord.xy - vec2(0.5,0.5);
float point_size = f_size + 5.0;
float distance = length(P*point_size) - f_size/2;
vec2 texcoord = gl_PointCoord* 2.0 - vec2(1.0);
float x = texcoord.x;
float y = texcoord.y;
float d = 1.0 - x*x - y*y;
if (d <= 0.0) discard;
float z = sqrt(d);
vec4 pos = frag_coord;
pos.z += f_radius*z;
vec3 pos2 = pos.xyz;
pos = projection_mat * pos;
gl_FragDepth = 0.5*(pos.z / pos.w)+0.5;
vec3 normal = vec3(x,y,z);
float diffuse = clamp(dot(normal, v_light_direction), 0.0, 1.0);
vec4 color = vec4((0.5 + 0.5*diffuse)*frag_color, 1.0);
    final_color = outline(distance, 1.0, 1.0, vec4(0,0,0,1), color);
    // final_color = color;
}
"""
| gpl-3.0 | 8,508,734,336,131,615,000 | 24.273273 | 107 | 0.651418 | false | 2.863559 | false | false | false |
sarareginaff/AutoencodersDeteccaoFalha | variationalAutoencoder.py | 1 | 8937 | ###################################### Variational Autoencoder ############################################
## Author: Sara Regina Ferreira de Faria
## Email: [email protected]
#Needed libraries
import numpy
import matplotlib.pyplot as plt
import pandas
import math
import scipy.io as spio
import scipy.ndimage
from scipy.stats import norm
from keras.layers import Layer
from sklearn.metrics import mean_squared_error, roc_curve, auc
from keras import backend as K
from keras import metrics
# fix random seed for reproducibility
numpy.random.seed(7)
# load the dataset
def loadData(file, dictName):
matfile = file
matdata = spio.loadmat(matfile)
dataset = numpy.ndarray(shape=(matdata[dictName].shape[1]), dtype=type(matdata[dictName][0,0]))
for i in range(matdata[dictName].shape[1]):
dataset[i] = matdata[dictName][0, i]
return dataset
# normalize dataset
def normalizeData(data):
maxVal = numpy.amax(data)
minVal = numpy.amin(data)
normalizedData = ((data-minVal)/(maxVal-minVal))
return normalizedData
# based on http://machinelearningmastery.com/time-series-prediction-with-deep-learning-in-python-with-keras/
# convert an array of values into a dataset matrix
def createMatrix(dataset, look_back=1):
dataX, dataY = [], []
for i in range(len(dataset)-look_back-1):
a = dataset[i:(i+look_back)]
dataX.append(a)
return numpy.array(dataX)
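
# --- illustrative note (not part of the original script) ---
# createMatrix turns a (T, n_features) series into overlapping windows,
# e.g. createMatrix(numpy.arange(5).reshape(5, 1), look_back=2) yields an
# array of shape (2, 2, 1) holding the windows [0, 1] and [1, 2].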
# based on https://blog.keras.io/building-autoencoders-in-keras.html
def sampling(args):
z_mean, z_log_var = args
x_train_latent_shape = (original_dim[0], latent_dim)
epsilon = K.random_normal(shape=((batchSizeModel,) + x_train_latent_shape), mean=0., #40, 480, 3, 2
stddev=epsilon_std)
return z_mean + K.exp(z_log_var / 2) * epsilon
# based on https://edouardfouche.com/Neural-based-Outlier-Discovery/
def calculateFprTpr (predicted, labels):
dist = numpy.zeros(len(predicted))
for i in range(len(predicted)):
dist[i] = numpy.linalg.norm(predicted[i])
fpr, tpr, thresholds = roc_curve(labels, dist)
return fpr, tpr
class CustomVariationalLayer(Layer):
def __init__(self, **kwargs):
self.is_placeholder = True
super(CustomVariationalLayer, self).__init__(**kwargs)
def vae_loss(self, x, x_decoded_mean):
xent_loss = original_dim[1] * metrics.binary_crossentropy(x, x_decoded_mean)
kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
return K.mean(xent_loss + kl_loss)
def call(self, inputs):
x = inputs[0]
x_decoded_mean = inputs[1]
loss = self.vae_loss(x, x_decoded_mean)
self.add_loss(loss, inputs=inputs)
# We won't actually use the output.
return x
def vae_loss1(x, x_decoded_mean):
xent_loss = original_dim[1] * metrics.binary_crossentropy(x, x_decoded_mean)
kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
return K.mean(xent_loss + kl_loss)
#************* MAIN *****************#
# variables
best_roc_auc = 0
best_epochs = 0
best_limit = 0
best_bottleneckDim = 0
best_look_back = 0
best_epsilon_std = 0
best_latent_dim = 0
for epochs in range(7,8): #16
print("epochs", epochs)
for limitAux in range(18,19): #12
limit = limitAux/10
print("limit", limit)
for bottleneckDim in range (4,5): #4
print("bottleneckDim", bottleneckDim)
for look_back in range(3,4): #2
print("look_back", look_back)
for epsilon_stdAux in range(3,4):
epsilon_std = epsilon_stdAux/10
print("epsilon_std", epsilon_std)
for latent_dim in range(1,2):
print("latent_dim", latent_dim)
# libraries
from keras.models import Model, Sequential
from keras.layers import Input, Dense, LSTM, RepeatVector, Lambda, Layer
batchSizeData = 1
lossEvaluation = 'mean_squared_error'
optimizer = 'adam'
batchSizeModel = look_back
roc_auc = []
FPRs = []
TPRs = []
						# load dataset with all fault simulations
originalDataset = loadData('DadosTodasFalhas.mat', 'Xsep')
# prepare dataset
filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[0][:,:], 4.0)
#filteredDataset = originalDataset[0][:,:]
normalizedDataset = normalizeData(filteredDataset)
dataset = createMatrix(normalizedDataset, look_back)
# split into train and test sets
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
x_train, x_test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
# get sample size
original_dim = (x_train.shape[1], x_train.shape[2])
# encoder
x = Input(shape=(original_dim)) #batchSizeModel, original_dim (22)
h = LSTM(int(bottleneckDim), activation='relu')(x)
z_mean = Dense(latent_dim)(h) #batchSizeModel,latent_dim
z_log_var = Dense(latent_dim)(h) #batchSizeModel,latent_dim
z = Lambda(sampling)([z_mean, z_log_var])
# decoder
decoded = RepeatVector(original_dim[0])(z_log_var)
h_decoded = LSTM(original_dim[1], return_sequences=True, activation='relu')(decoded)
x_decoded_mean = Dense(original_dim[1], activation='sigmoid')(h_decoded) #batchSizeModel,original_dim
						# autoencoder
Model = Model(x, x_decoded_mean)
Model.compile(optimizer='rmsprop', loss=vae_loss1)
# Train model with normal data
Model.fit(x_train, x_train, shuffle=True, epochs=epochs, batch_size=batchSizeModel, validation_data=(x_test, x_test), verbose=False)
# get error for each batch of normal data
normalPredict = []
normalError = []
j = 0
for k in range(0,len(dataset),batchSizeModel):
dataBatch = dataset[k:k+batchSizeModel]
normalPredict.append(Model.predict(dataBatch))
normalError.append(mean_squared_error(dataBatch[:,0,:], normalPredict[j][:,0,:]))
j += 1
#***** Testing if it is a fault or not *****#
for i in range(1,len(originalDataset)):
#local variables
j = 0
faults = []
trainPredict = []
faultError = []
predicted = []
# prepare dataset
filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[i][:,:], 4.0)
#filteredDataset = originalDataset[i][:,:]
normalizedDataset = normalizeData(filteredDataset)
dataset = createMatrix(normalizedDataset, look_back)
# get error for each batch of data
for k in range(0,len(dataset),batchSizeModel):
dataBatch = dataset[k:k+batchSizeModel]
# generate predictions using model
                                trainPredict.append(vae.predict(dataBatch))
predicted.append(trainPredict[j][:,0,:])
faultError.append(mean_squared_error(dataBatch[:,0,:], predicted[j]))
# check if it is a fault or not
if (faultError[j] > normalError[j]*limit):
faults.append(1)
else:
faults.append(0)
j = j + 1
#print("Dataset", i, ". IsFaultVector: ", faults)
# define labels to ROC curve
labels = []
for k in range(0,len(dataset),batchSizeModel):
if (k >= 100):
labels.append(1)
if (k < 100):
labels.append(0)
# calculate AUC, fpr and tpr
fpr, tpr = calculateFprTpr(faults, labels)
FPRs.append(fpr)
TPRs.append(tpr)
roc_auc.append(auc(fpr, tpr))
sum_roc_auc = 0
for i in range(len(roc_auc)):
sum_roc_auc += roc_auc[i]
if (sum_roc_auc > best_roc_auc):
best_roc_auc = sum_roc_auc
best_epochs = epochs
best_limit = limit
best_bottleneckDim = bottleneckDim
best_look_back = look_back
best_epsilon_std = epsilon_std
best_latent_dim = latent_dim
sum_selected_roc_auc = 0
for j in range(len(FPRs)):
i = j+1
if(i == 1 or i == 2 or i == 5 or i == 7 or i == 8 or i == 9 or i == 10 or i == 11 or i == 12 or i == 14 or i == 15 or i == 19):
        plt.plot(FPRs[j], TPRs[j], label="AUC{0}= {1:0.2f}".format(i, roc_auc[j])) # label with dataset number i (= j+1)
sum_selected_roc_auc += roc_auc[j]
plt.xlim((0,1))
plt.ylim((0,1))
plt.plot([0, 1], [0, 1], color='navy', linestyle='--')
plt.xlabel('False Positive rate')
plt.ylabel('True Positive rate')
plt.title('ROC curve - Variational Autoencoder')
plt.legend(loc="lower right")
plt.show()
#plot baseline and predictions
#plt.plot(normalizedDataset)
#plt.plot(numpy.concatenate( predicted, axis=0 ))
#plt.show()
#plt.plot(roc_auc)
#plt.show()
print("bests parameters")
print("best_limit", best_limit) #1
print("best_epochs", best_epochs) #10
print("best_roc_auc", best_roc_auc) #11.27
print("best_look_back", best_look_back) #1
print("best_bottleneckDim", best_bottleneckDim) #2
print("best_epsilon_std", best_epsilon_std)
print("best_latent_dim", best_latent_dim)
print("sum_selected_roc_auc", sum_selected_roc_auc)
| mit | 8,103,235,079,007,856,000 | 32.223048 | 138 | 0.634665 | false | 3.041865 | true | false | false |
espenhgn/ViSAPy | examples/example_in_vitro_MEA/example_in_vitro_MEA.py | 1 | 11523 | #!/usr/bin/env python
'''do sims of cells, LFP, and output data'''
#import modules
import uuid
import numpy as np
import h5py
import os
import urllib2 #used below to download the experimental noise data
from glob import glob
#workaround for plots on cluster
if not os.environ.has_key('DISPLAY'):
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from scipy.signal import filtfilt, butter, lfilter
from time import time, asctime
import ViSAPy
import MoI
import neuron
from mpi4py import MPI
######## set random number generator seed ######################################
SEED = 1234567
POPULATIONSEED = 1234567
np.random.seed(SEED)
################# Initialization of MPI stuff ##################################
COMM = MPI.COMM_WORLD
SIZE = COMM.Get_size()
RANK = COMM.Get_rank()
######## create unique output folder and copy simulation script ################
if RANK == 0:
#savefolder = glob('savedata_in_vitro_MEA*')[-1]
string = asctime().split()
savefolder = os.path.join(os.path.split(__file__)[0], 'savedata_in_vitro_MEA_')
for s in string:
for ss in s.split(':'):
savefolder += ss + '_'
savefolder += uuid.uuid4().hex
os.mkdir(savefolder)
os.system("cp %s '%s'" % (__file__, savefolder + '/.'))
else:
savefolder = None
savefolder = COMM.bcast(savefolder, root=0)
##### load NMODL mechanisms ####################################################
#neuron.h.load_file('stdlib.hoc')
#neuron.h.load_file('nrngui.hoc')
neuron.load_mechanisms("modfiles")
################################################################################
# PARAMETERS
################################################################################
tstart = 0
tend = 60000
dt = 0.05
#set up base parameter file for the LFPy.Cell or LFPy.TemplateCell class,
#without specifying cell model.
cellParameters = {
'v_init' : -65,
'passive' : False,
'timeres_NEURON' : dt,
'timeres_python' : dt,
'tstartms' : tstart,
'tstopms' : tend,
'verbose' : False,
'pt3d' : False,
}
# set the default rotation of the cells
defaultrotation = {}
#LFPy can simulate directly to file, but for performance reasons, this
#feature should be avoided
simulationParameters = {
#'to_file' : True, #file_name set in cellsim()
}
#list up all model folders, associate model neurons by morphology name
morphologies = glob('neuron_models/Large/*/*Morph.hoc') + \
glob('neuron_models/Medium*/*/*Morph.hoc') + \
glob('neuron_models/Small*/*/*Morph.hoc')
#one custom code file per morphology
model_paths = glob('neuron_models/Large/*') + \
glob('neuron_models/Medium*/*') + \
glob('neuron_models/Small*/*')
#custom codes for cell simulations
custom_codes = []
for model_path in model_paths:
cell_name = os.path.split(model_path)[-1].lower()
custom_codes += [os.path.join(model_path, cell_name + '.hoc')]
def getParamsMoIMapping(
slice_thickness = 200.,
n_rows = 6,
n_cols = 17,
elec_sep = 18.,
elec_radius = 3.5):
'''Set up MEA with MoI'''
n_elecs = n_rows * n_cols
# FIXING EARLIER WRONG ELECTRODE POSITIONS
elec_x_int = np.load('z_integer.npy')
elec_y_int = np.load('y_integer.npy')
# For some reason they seem to need individual scaling factors. From pythagoras
ky = 9 / np.max(np.diff(sorted(elec_y_int)))
###kx = 9 * np.sqrt(3) / np.max(np.diff(sorted(elec_x_int)))
elec_x = elec_x_int * ky
elec_y = elec_y_int * ky
elec_x -= np.min(elec_x)
elec_y -= np.min(elec_y)
paramsMapping = {
'use_line_source': True,
'include_elec': True,
'elec_z' : -slice_thickness/2., #SCALAR
'elec_y' : elec_y, # ARRAY
'elec_x' : elec_x, # ARRAY
'elec_radius': elec_radius,
'n_avrg_points' : 10, #Number of electrode averaging points
}
paramsMoI = {
'sigma_G': 0.0, # Below electrode
'sigma_S': 1.5, # Saline conductivity
'sigma_T': 0.1, # Tissue conductivity
'slice_thickness': slice_thickness,
'steps' : 10,}
return paramsMapping, paramsMoI
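# Geometry note (an interpretation of the parameters above, not original
# code): MoI treats the slice as three planar layers -- insulating glass
# below the electrode plane (sigma_G = 0), tissue of conductivity sigma_T in
# the middle, and saline (sigma_S) on top -- and 'steps' sets how many image
# charges are summed when mapping membrane currents to electrode potentials.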
paramsMapping, paramsMoI = getParamsMoIMapping()
#dummy electrodeParameters
electrodeParameters = dict(
x = paramsMapping['elec_x'],
y = paramsMapping['elec_y'],
z = np.array([paramsMapping['elec_z'] for x in paramsMapping['elec_x']]),
)
def getPopParams(#NCOLS=1, NROWS=1,
NCOLS=4, NROWS=14,
PITCH=np.sqrt(2/(np.sqrt(3)*1400))*1E3, #~1400 mm-2, hex tiling
PITCH_STD=5.,
HEIGHT = 15.,
HEIGHT_STD = 1.,
XOFFSET=0., YOFFSET=0., ZOFFSET=-100):
#set up hexagonal grid of cells
POPULATION_SIZE = NCOLS * NROWS
x = []
y = []
for i in xrange(NROWS):
if i % 2 == 0:
x = np.r_[x, np.arange(NCOLS)*PITCH]
else:
x = np.r_[x, np.arange(NCOLS)*PITCH + np.cos(np.pi/3)*PITCH]
y = np.r_[y, i * np.ones(NCOLS) * np.sin(np.pi/3) * PITCH]
#apply spatial jitter and center population on MEA grid
x += np.random.normal(scale=PITCH_STD, size=x.size, )
x -= x.mean()
x += XOFFSET
y += np.random.normal(scale=PITCH_STD, size=y.size, )
y -= y.mean()
y += YOFFSET
z = np.random.normal(ZOFFSET+HEIGHT, HEIGHT_STD, x.size)
return dict(
POPULATION_SIZE = NCOLS * NROWS,
X = x,
Y = y,
Z = z
)
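# Worked example for the default PITCH above: a hexagonal lattice of density
# n cells/mm^2 has pitch a = sqrt(2 / (sqrt(3) * n)) mm, so n = 1400 mm^-2
# gives a ~ 0.0287 mm, i.e. ~28.7 um after the 1E3 scaling to micrometers.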
populationParameters = getPopParams(XOFFSET = paramsMapping['elec_x'].mean(),
YOFFSET = paramsMapping['elec_y'].mean(),
ZOFFSET = paramsMapping['elec_z'])
#set up stimulus by graded synapse input modeled as OU process conductance
gsynParams = dict(
OUParams = dict(
T = (tend - tstart)*1E-3,
dt = dt*1E-3,
X0 = 0,
m = 0,
sigma = 1.,
nX = populationParameters['POPULATION_SIZE']),
lambda_d = np.sqrt(2/(np.sqrt(3)*1400))*1E3, #mean cell pitch
gsyn_mean = 1. / 50000,
gsyn_std = 1. / 75000,
)
#some signal processing parameters
nyquist = 1000. / cellParameters['timeres_python'] / 2
filters = []
#presample filter to avoid aliasing
b, a = butter(1, np.array([0.5, 8000]) / nyquist, btype='pass')
filters.append({
'b' : b,
'a' : a,
'filterFun' : lfilter
})
#filter parameters, filterFun must be either scipy.signal.lfilter or filtfilt
b, a = butter(4, np.array([300, 5000]) / nyquist, btype='pass')
filters.append({
'b' : b,
'a' : a,
'filterFun' : filtfilt
})
#Parameters for class ViSAPy.LogBumpFilterBank that sets up
#series of cosine log-bump filters:
logBumpParameters = dict(
n = 16,
taps = 401,
alpha = 0.01,
nyquist=nyquist,
)
#download experimental data for use in generation of noise
fname = os.path.join('data', 'signal_converted.npy')
if RANK == 0:
if not os.path.isdir('data'):
os.mkdir('data')
if not os.path.isfile(fname):
u = urllib2.urlopen('https://www.dropbox.com/s/u6auynymlcbbp36/' +
'signal_converted.npy?dl=1')
f = open(fname, 'w')
f.write(u.read())
f.close()
COMM.Barrier()
#Noise parameters including noise covariance matrix
noiseParameters = None
#extract noise covariances extracted from experimental tetrode recording
noiseFeaturesParameters = dict(logBumpParameters)
noiseFeaturesParameters.update({
'fname' : fname,
'outputfile' : os.path.join(savefolder, 'ViSAPy_noise.h5'),
'T' : 15000,
'srate_in' : 20000,
'srate_out' : 2 * nyquist,
'NFFT' : 2**16,
'psdmethod': 'mlab',
'remove_spikes' : True,
#parameters passed to class SpikeACut, only used if remove_spikes == True
'remove_spikes_args' : {
'TEMPLATELEN' : 32,
'TEMPLATEOFFS' : 0.5,
'threshold' : 5, #standard deviations
'data_filter' : {
'filter_design' : butter,
'filter_design_args' : {
'N' : 2,
'Wn' : np.array([300., 5000.]) / nyquist,
'btype' : 'pass',
},
'filter' : filtfilt,
},
},
'amplitude_scaling' : 1E-3,
})
#container file for noise output etc.
noise_output_file = os.path.join(savefolder, 'ViSAPy_noise.h5')
################################################################################
## MAIN
################################################################################
################################################################################
## Step 1: Estimate PSD and covariance between channels, here using
## an experimental dataset.
##
## In the present ViSAPy, we should use only a single RANK for this
## and subsequent steps, we also skip regenerating noise and spike
## events, because it can take some time for long simulation durations
##
if RANK == 0:
if not os.path.isfile(noise_output_file):
noise_features = ViSAPy.NoiseFeatures(**noiseFeaturesParameters)
################################################################################
## Step 2: Generate synthetic noise with PSD and covariance channels extracted
## using class NoiseFeatures, preserving the overall amplitude.
## We choose to save directly to file, as it will be used in
## later steps
##
noise_generator = ViSAPy.CorrelatedNoise(psd=noise_features.psd,
C=noise_features.C,
**noiseFeaturesParameters)
#file object containing extracellular noise and related data
f = h5py.File(noise_output_file)
f['data'] = noise_generator.correlated_noise(T = cellParameters['tstopms'])
f.close()
#sync
COMM.Barrier()
################################################################################
## Step 3: Fix seed and set up Testdata object, generating a model cell
## population, find and distribute synapse inputs with spiketrains from
## network, run simulations for extracellular potentials,
## collect data and generate final benchmark data
##
np.random.seed(POPULATIONSEED)
benchmark_data = MoI.BenchmarkDataMoI(
cellParameters = cellParameters,
morphologies = morphologies,
defaultrotation = defaultrotation,
simulationParameters = simulationParameters,
populationParameters = populationParameters,
electrodeParameters = electrodeParameters,
noiseFile = noise_output_file,
filters = filters,
savefolder = savefolder,
default_h5_file = 'lfp_cell_%.3i.h5',
nPCA = 2,
TEMPLATELEN = 80,
TEMPLATEOFFS = 0.3,
spikethreshold = 3.,
custom_codes = custom_codes,
paramsMapping = paramsMapping,
paramsMoI = paramsMoI,
gsynParams = gsynParams,
)
print 'setup ok!'
benchmark_data.run()
print 'run ok'
benchmark_data.collect_data()
print 'collect ok'
#plot single cell output
myplot = ViSAPy.plotBenchmarkData(benchmark_data)
for i in range(populationParameters['POPULATION_SIZE']):
if i % SIZE == RANK:
fig = myplot.plot_figure_13(cellindices=np.array([i]),
bins=10**np.linspace(np.log10(10), np.log10(1E3), 67))
fig.savefig(os.path.join(savefolder, 'cell_%.2i.pdf' % i))
plt.close(fig)
COMM.Barrier()
| gpl-2.0 | -3,942,631,519,938,643,000 | 29.164921 | 90 | 0.567734 | false | 3.439701 | false | false | false |
kfsone/tinker | python/restful.py | 1 | 6005 | # Copyright (C) 2017 Oliver "kfsone" Smith <[email protected]>
# Provided under The MIT License -- see LICENSE.
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import requests
from utilities import join_uri_paths
"""
A simple wrapper for a RESTful api, which returns the parsed json response
as an object.
Example:
# Request the post with id #1 and the comments for it at
# jsonplaceholder.typicode.com
api = RESTful("jsonplaceholder.typicode.com", protocol="https")
# Get a list of posts as dictionaries ( [{...},...] )
posts = api.query("/posts")
# Get post id #1 as a dictionary of properties ( {'body':..., ...})
post = api.query("/posts/1")
# Get the comments on post #1 as a list of dictionaries.
    cmts = api.query("/posts/1/comments") # => [{...}, ...]
# Simulate posting our own post.
post = api.query("/posts", body={"userId":123, "title":"Test Post",
"body":"This is a test, just a test."})
For some interfaces you will need to use 'put' rather than 'post', in which
case there is an "update" rather than "query" member:
post = api.update("/posts", body={"userId":123, "title":"Test Post"})
You can use "prefix" to create API-zoned objects:
class PostsAPI(RESTful):
DEFAULT_PROTOCOL = "https"
DEFAULT_PREFIX = "/posts"
posts = PostsAPI("jsonplaceholder.typicode.com")
posts.query() # Returns list of posts
posts.query("1") # Returns /posts/1 => dictionary of post 1 properties
"""
###############################################################################
#
class RESTful(object):
DEFAULT_PREFIX = "/"
DEFAULT_PROTOCOL = "http"
FETCH = requests.get
CREATE = requests.post
UPDATE = requests.put
###########################################################################
# Constructor.
#
def __init__(self, address, protocol=None, prefix=None):
"""
Construct a restful API instance to the given address,
assuming the API base is "http://{address}/" by default.
\param address The name or IP of the service,
\param protocol [optional] Protocol to speak (default=http),
\param prefix [optional] Top-level of the API (default=/),
"""
if protocol is None:
protocol = self.DEFAULT_PROTOCOL
if prefix is None:
prefix = self.DEFAULT_PREFIX
self._base_url = "%s://%s" % (protocol, address)
self._prefix = prefix
###########################################################################
# Helper to translate sub-paths into complete paths with a protocol.
#
def _get_request_path(self, query_path):
"""
Internal helper than translates a query path into a request path.
\param query_path The path the user is providing,
\return The complete path to request.
"""
if not query_path:
request_path = self._prefix
elif not self._prefix or query_path.startswith(self._prefix):
# When there's no prefix or the incoming path included it,
# just use the query path.
request_path = query_path
elif query_path.startswith("./"):
# If prefix="/api/" and you want "/api/api/foo", use "./api/foo"
# or "/api/api/foo".
request_path = join_uri_paths(self._prefix, query_path[2:])
else:
# Otherwise, we inject the prefix.
request_path = join_uri_paths(self._prefix, query_path)
# Introduce the protocol and address.
return join_uri_paths(self._base_url, request_path)
###########################################################################
# Make an actual query.
#
def query(self, query_path=None, body=None):
"""
Send a query within the API using GET or, if the optional body is
provided, via POST. The body can either be a list or dictionary to be
sent as JSON, anything else will be sent as-is as the 'data' field of
the post.
If you are using an API which requests you to use "PUT" you should use
the 'update' method instead.
`prefix` is automatically added unless the query_path includes it,
e.g., given r = RESTful("host", prefix="/api/")
r.query("/foo")
r.query("/api/foo")
are equivalent. Use "./" to avoid this, e.g.
r.query("./api/foo") => /api/api/foo
\param query_path The prefix-relative path to the query,
\param body a: dict{ parameters },
b: string representation of data to be sent,
\return JSON representation of the response,
"""
# Translate the query path to a request
request = self._get_request_path(query_path)
# If body is None, just use get:
if body is None:
response = self.FETCH(request)
# If we're given a dictionary or list, automatically convert it to a
# JSON representation.
elif isinstance(body, (dict, list, tuple)):
response = self.CREATE(request, json=body)
else:
response = self.CREATE(request, data=body)
return response.json()
###########################################################################
# Perform a "put" operation to update existing data.
#
def update(self, query_path=None, body=None):
"""
Like 'query' but uses 'put' as the transport method.
"""
request = self._get_request_path(query_path)
if not body or isinstance(body, (dict, list, tuple)):
response = self.UPDATE(request, json=body)
else:
response = self.UPDATE(request, data=body)
return response.json()
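###############################################################################
# Minimal usage sketch -- assumes network access to the public demo service
# referenced in the module docstring; not part of the original API.
if __name__ == "__main__":
    api = RESTful("jsonplaceholder.typicode.com", protocol="https")
    first_post = api.query("/posts/1")
    print(first_post["title"])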
| mit | -5,847,487,410,513,796,000 | 32.176796 | 79 | 0.561865 | false | 4.280114 | false | false | false |
alfeliz/ALEX-scope | tektronik.py | 1 | 8334 | #Version 18-03-2019
import os #Folder and files managment
import csv #To make the nice CSV output.
import re #Regular expresions use.
import numpy as np #Numerical work in Python. Yeah!!!
import scipy.integrate as inte #Numerical integration. YOU NEED TO INSTALL THE SCIPY PACKAGE.
#~ import matplotlib.pyplot as plt
import Gpib #Gpib module import everything. there is no overlap.
# (http://stackoverflow.com/questions/710551/import-module-or-from-module-import)
import time #timing handling
###############################
###FUNCTIONS:
###############################
#To remove similar values in a chain:
#From: https://www.peterbe.com/plog/uniqifiers-benchmark
#~ def rem_repeat(seq, idfun=None):
#~ # order preserving
#~ if idfun is None:
#~ def idfun(x): return x
#~ seen = {}
#~ result = []
#~ for item in seq:
#~ marker = idfun(item)
#~ # in old Python versions:
#~ # if seen.has_key(marker)
#~ # but in new ones:
#~ if marker in seen: continue
#~ seen[marker] = 1
#~ result.append(item)
#~ return result
#Defining a function to save the channels info:
def chansave(channel,chandata):
    if not os.path.isfile(channel): #When the file does not exist.
with open(channel, 'w') as arch:
writer = csv.writer(arch, delimiter="\t", quotechar=" ")
#8 decimals:
writer.writerows(map(lambda t: ("%.8e" % t[0], "%.8e" % t[1]), chandata))
#~ writer.writerows(chandata)
else: #the file exists:
with open("01-"+channel, 'w') as arch:
writer = csv.writer(arch, delimiter="\t", quotechar=" ")
#8 decimals:
writer.writerows(map(lambda t: ("%.8e" % float(t[0]), "%.8e" % float(t[1])), chandata))
return[0]
def transf(signal, device):
time = []
volts = []
for x in range(0,len(signal)):
time.append(signal[x][0])
volts.append(signal[x][1])
time = np.array(time)
volts = np.array(volts)
#REMOVE HERE THE BASELINE OF THE SCOPES.
if "2Rog" in device:
#Multiplying to obtain the A/s:
der_curr = volts*5671000000.00 #Rogowsky gives current derivative. Updated value for forth time
result = np.column_stack((time,der_curr))
elif "DI03" in device: #New voltage divider
volt_div03 = 11068*volts #
result = np.column_stack((time,volt_div03))
elif "DI04" in device: #New voltage divider
volt_div04 = 6930*volts #
result = np.column_stack((time,volt_div04))
elif "2Res" in device:
volt_div2 = 1359*volts #Updated value for second time
result = np.column_stack((time,volt_div2))
elif "3Res" in device:
volt_div3 = 2400*volts #Updated value for second time
result = np.column_stack((time,volt_div3))
elif "Phot" in device:
#Normalizing to 1:
phot = volts/max(volts)
result = np.column_stack((time,phot))
elif "Curr" in device: #Updated value for second time
curr_time = volts * 51780
result = np.column_stack((time,curr_time))
elif "None" in device: #"No device" attached to the scope.
result = np.column_stack((time,volts))
return[result]
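# Illustrative call (hypothetical data; the device tag picks the scaling):
# scaled = transf(channel_curve, '2Rog')[0] # -> columns: time, dI/dt in A/s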
def takechan(channel,sleeptime,addr):
#sleeptime = 0.030
#addr = 3 #Gpib address of the scope
scope = Gpib.Gpib(0,addr)
scope.write("*IDN?") #Identify the scope
time.sleep(sleeptime)
scope_type = scope.read(3)
if scope_type == "TEK": #tektronik scopes
scope.write("HEADER OFF") #Don't give headers of data with the data.
time.sleep(sleeptime)
scope.write("DATA:WIDTH 1")
time.sleep(sleeptime)
scope.write("DATA:ENCDG ASCII") #1 byte for voltage data and ASCII format.
time.sleep(sleeptime)
selectchan = "SELECT:"+channel+" ON"
datachan = "DATA:SOURCE "+channel
#SELECTING CHANNEL:
scope.write(selectchan) #Set the channel to show, if was not it will not record the data...
time.sleep(sleeptime)
scope.write(datachan) #Set the channel source to Channel datachan.
time.sleep(sleeptime)
#CHANNEL ASIGNMENT CHECKING
scope.write("DATA:SOURCE?") #Ask for data channel source.
time.sleep(sleeptime)
CHAN = scope.read(3)
if CHAN != channel:
print("Error: Channel not correctly assigned.")
print(CHAN, datachan)
raise SystemExit #Go out. all wrong.
#WAVEFORM PREAMBLE (ALL INFO OVER DATA)
scope.write("WFMPRE?")
time.sleep(sleeptime)
preamble = scope.read(256).split(";")
#preamble = preamble.split(";")
#USE OF PREAMBLE INFO. PUT INFO IN NICE VARIABLES.
points = int(preamble[5])
ymult = float(preamble[12])
yzero = float(preamble[13])
yoff = int(float(preamble[14])) #Not measured, but stablished. Let's remove it...
#WAVEFORM VOLTS/DIV SCALE:
text = channel + ":SCALE?"
scope.write(text)
time.sleep(sleeptime)
Volt = float(scope.read())
print("Reading data from channel {!s}...".format(CHAN))
#WAVEFORM DATA: (FINALLY)
scope.write("CURVE?")
time.sleep(sleeptime)
curve = scope.read(16000).split(",")
if curve[len(curve)-1] == "": #Avoiding strange numbers...
curve[len(curve)-1] = "0"
print("Reading finished...")
#Waveform transformation into real volts:
#The rounding to 2 ciphers is important to avoid the use of
#garbage bits apperaing in the digitazing process from the computer.
# As now no integration is necessary, 10 cyphers are used.
CH_curve = [round((int(x) - yoff)*ymult,10) for x in curve]
#CREATING TIME VECTOR:
t =[]
scope.write("WFMPRE:XINCR?")
time.sleep(sleeptime)
sweep=float(scope.read())
for n in range(len(CH_curve)):
t.append(float(n)*sweep)
CH_curve = zip(t,CH_curve)
CH_error = ymult/Volt
else: #Lecroy scope. Its label is shit.
scope.write('DTFORM ASCII') #ASCII format for the data.
time.sleep(sleeptime)
scope.write('WAVESRC '+channel) #Selecting channel for waveform download.
time.sleep(sleeptime)
scope.write('DTINF?') #reading information of the scope and waveform setup.
time.sleep(sleeptime)
preamble = scope.read(550).split(",")
#Determining the number of points to be read in the waveform(Memory Length)
points = preamble[23][16:] #text, not number!!!!
#Determining the time division:
t_sweep = ( convlecroytime(preamble[20][11:])/float(points) )*10
#Passing them to the scope:
scope.write('DTPOINTS '+points)
time.sleep(sleeptime)
#Determining the scaling and offset of the channel:
if channel == 'CH1':
CH_scale = convlecroyscale(preamble[4][12:]) #This is a number
CH_offset =convlecroyscale(preamble[5][9:])
elif channel == 'CH2':
CH_scale = convlecroyscale(preamble[8][12:])
CH_offset =convlecroyscale(preamble[9][9:])
elif channel == 'CH3':
CH_scale = convlecroyscale(preamble[12][12:])
CH_offset =convlecroyscale(preamble[13][9:])
elif channel == 'CH4':
CH_scale = convlecroyscale(preamble[16][12:])
CH_offset =convlecroyscale(preamble[17][9:])
print("Reading data from channel {!s}...".format(channel))
scope.write('DTWAVE?')
time.sleep(sleeptime)
        wave_ascii = scope.read(8*int(points)).split(",") #each ASCII sample takes up to 8 bytes, hence reading 8*points
wave_number = [float(number) for number in wave_ascii]
volts = [ round( ( ((float(number) / 256 / 32 ) * CH_scale ) - CH_offset ),12) for number in wave_ascii]
#Making the time vector:
t =[] #It's a list
for i in range(len(volts)):
t.append(float(i)*t_sweep)
CH_curve = zip(t,volts) #List of tuples.
CH_error = CH_scale
return(CH_curve, CH_error, preamble)
def readTekScreen(adrr,sleeptime):
scope = Gpib.Gpib(0,adrr)
scope.write('HARDCOPY START')
time.sleep(sleeptime)
raw_data = scope.read(80000) #Minimun number to obtain the full picture
return raw_data
def readLECScreen(adrr,sleeptime):
scope = Gpib.Gpib(0,adrr)
scope.write('TSCRN? BMP')
time.sleep(sleeptime)
raw_data = scope.read(330000) #Minimun number to obtain the full picture
return raw_data[10:] #It is necessary to remove the first byte, as it is no data.
def convlecroyscale(scale_text):
value = float(re.findall(r'[+-]?[0-9.]+',scale_text)[0]) #Hopefully the scale in volts
if re.findall(r'[V-mV]+',scale_text)[0] == 'mV':
value = value * 1e-3
return value
def convlecroytime(scale_time):
value = float(re.findall(r'[+-]?[0-9.]+',scale_time)[0]) #time scale number
if re.findall(r'[s,ms,us,ns]+',scale_time)[0] == 'ms':
value = value * 1e-3
elif re.findall(r'[s,ms,us,ns]+',scale_time)[0] == 'us':
value = value * 1e-6
elif re.findall(r'[s,ms,us,ns]+',scale_time)[0] == 'ns':
value = value * 1e-9
return value
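# Illustrative session (hypothetical GPIB address/sleep time; needs a scope
# on the bus, so this is a sketch rather than a runnable test):
# curve, err, preamble = takechan('CH1', 0.030, 3)
# chansave('shot01_CH1.txt', transf(curve, 'DI03')[0])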
| gpl-3.0 | -2,498,187,895,155,513,300 | 33.725 | 107 | 0.674826 | false | 2.879751 | false | false | false |
burke-software/BSC-website | wiki/plugins/notifications/management/commands/wiki_notifications_create_defaults.py | 1 | 1907 | # -*- coding: utf-8 -*-
from optparse import make_option
from django.core.management.base import BaseCommand
from django.utils import translation
class Command(BaseCommand):
    args = ''
    help = 'Create default notification subscriptions for existing wiki articles (owners and past editors).'  # @ReservedAssignment
def handle(self, *args, **options):
from django.conf import settings
translation.activate(settings.LANGUAGE_CODE)
from django.contrib.auth import get_user_model
from wiki.plugins.notifications import models
from wiki.plugins.notifications.settings import ARTICLE_EDIT
from wiki.models import Article
from django_nyt.utils import subscribe
from django_nyt.models import Settings
from django.contrib.contenttypes.models import ContentType
# User: Settings
settings_map = {}
def subscribe_to_article(article, user):
if user not in settings_map:
settings_map[user], __ = Settings.objects.get_or_create(user=user)
return subscribe(settings_map[user], ARTICLE_EDIT, content_type=ContentType.objects.get_for_model(article), object_id=article.id)
for article in Article.objects.all():
if article.owner:
subscription = subscribe_to_article(article, article.owner)
models.ArticleSubscription.objects.get_or_create(article=article, subscription=subscription)
for revision in article.articlerevision_set.exclude(user=article.owner).exclude(user=None).values('user').distinct():
user = get_user_model().objects.get(id=revision['user'])
subscription = subscribe_to_article(article, user)
models.ArticleSubscription.objects.get_or_create(article=article, subscription=subscription)
translation.deactivate() | gpl-3.0 | -8,975,310,130,230,468,000 | 45.536585 | 141 | 0.66387 | false | 4.60628 | false | false | false |
lynnlyc/droidbot | droidbot/utg.py | 1 | 9339 | import logging
import json
import os
import random
import datetime
import networkx as nx
from .utils import list_to_html_table
class UTG(object):
"""
UI transition graph
"""
def __init__(self, device, app, random_input):
self.logger = logging.getLogger(self.__class__.__name__)
self.device = device
self.app = app
self.random_input = random_input
self.G = nx.DiGraph()
self.effective_event_strs = set()
self.ineffective_event_strs = set()
self.explored_state_strs = set()
self.reached_state_strs = set()
self.reached_activities = set()
self.first_state_str = None
self.last_state_str = None
self.last_transition = None
self.effective_event_count = 0
self.input_event_count = 0
self.start_time = datetime.datetime.now()
def add_transition(self, event, old_state, new_state):
self.add_node(old_state)
self.add_node(new_state)
# make sure the states are not None
if not old_state or not new_state:
return
event_str = event.get_event_str(old_state)
self.input_event_count += 1
if old_state.state_str == new_state.state_str:
self.ineffective_event_strs.add(event_str)
# delete the transitions including the event from utg
for new_state_str in self.G[old_state.state_str]:
if event_str in self.G[old_state.state_str][new_state_str]["events"]:
self.G[old_state.state_str][new_state_str]["events"].pop(event_str)
if event_str in self.effective_event_strs:
self.effective_event_strs.remove(event_str)
return
self.effective_event_strs.add(event_str)
self.effective_event_count += 1
if (old_state.state_str, new_state.state_str) not in self.G.edges():
self.G.add_edge(old_state.state_str, new_state.state_str, events={})
self.G[old_state.state_str][new_state.state_str]["events"][event_str] = {
"event": event,
"id": self.effective_event_count
}
self.last_state_str = new_state.state_str
self.last_transition = (old_state.state_str, new_state.state_str)
self.__output_utg()
def add_node(self, state):
if not state:
return
if state.state_str not in self.G.nodes():
state.save2dir()
self.G.add_node(state.state_str, state=state)
if self.first_state_str is None:
self.first_state_str = state.state_str
if state.foreground_activity.startswith(self.app.package_name):
self.reached_activities.add(state.foreground_activity)
def __output_utg(self):
"""
Output current UTG to a js file
"""
if not self.device.output_dir:
return
utg_file_path = os.path.join(self.device.output_dir, "utg.js")
utg_file = open(utg_file_path, "w")
utg_nodes = []
utg_edges = []
for state_str in self.G.nodes():
state = self.G.nodes[state_str]["state"]
package_name = state.foreground_activity.split("/")[0]
activity_name = state.foreground_activity.split("/")[1]
short_activity_name = activity_name.split(".")[-1]
state_desc = list_to_html_table([
("package", package_name),
("activity", activity_name),
("state_str", state.state_str),
("structure_str", state.structure_str)
])
utg_node = {
"id": state_str,
"shape": "image",
"image": os.path.relpath(state.screenshot_path, self.device.output_dir),
"label": short_activity_name,
# "group": state.foreground_activity,
"package": package_name,
"activity": activity_name,
"state_str": state_str,
"structure_str": state.structure_str,
"title": state_desc,
"content": "\n".join([package_name, activity_name, state.state_str, state.search_content])
}
if state.state_str == self.first_state_str:
utg_node["label"] += "\n<FIRST>"
utg_node["font"] = "14px Arial red"
if state.state_str == self.last_state_str:
utg_node["label"] += "\n<LAST>"
utg_node["font"] = "14px Arial red"
utg_nodes.append(utg_node)
for state_transition in self.G.edges():
from_state = state_transition[0]
to_state = state_transition[1]
events = self.G[from_state][to_state]["events"]
event_short_descs = []
event_list = []
for event_str, event_info in sorted(iter(events.items()), key=lambda x: x[1]["id"]):
event_short_descs.append((event_info["id"], event_str))
if self.device.adapters[self.device.minicap]:
view_images = ["views/view_" + view["view_str"] + ".jpg"
for view in event_info["event"].get_views()]
else:
view_images = ["views/view_" + view["view_str"] + ".png"
for view in event_info["event"].get_views()]
event_list.append({
"event_str": event_str,
"event_id": event_info["id"],
"event_type": event_info["event"].event_type,
"view_images": view_images
})
utg_edge = {
"from": from_state,
"to": to_state,
"id": from_state + "-->" + to_state,
"title": list_to_html_table(event_short_descs),
"label": ", ".join([str(x["event_id"]) for x in event_list]),
"events": event_list
}
# # Highlight last transition
# if state_transition == self.last_transition:
# utg_edge["color"] = "red"
utg_edges.append(utg_edge)
utg = {
"nodes": utg_nodes,
"edges": utg_edges,
"num_nodes": len(utg_nodes),
"num_edges": len(utg_edges),
"num_effective_events": len(self.effective_event_strs),
"num_reached_activities": len(self.reached_activities),
"test_date": self.start_time.strftime("%Y-%m-%d %H:%M:%S"),
"time_spent": (datetime.datetime.now() - self.start_time).total_seconds(),
"num_input_events": self.input_event_count,
"device_serial": self.device.serial,
"device_model_number": self.device.get_model_number(),
"device_sdk_version": self.device.get_sdk_version(),
"app_sha256": self.app.hashes[2],
"app_package": self.app.package_name,
"app_main_activity": self.app.main_activity,
"app_num_total_activities": len(self.app.activities),
}
utg_json = json.dumps(utg, indent=2)
utg_file.write("var utg = \n")
utg_file.write(utg_json)
utg_file.close()
def is_event_explored(self, event, state):
event_str = event.get_event_str(state)
return event_str in self.effective_event_strs or event_str in self.ineffective_event_strs
def is_state_explored(self, state):
if state.state_str in self.explored_state_strs:
return True
for possible_event in state.get_possible_input():
if not self.is_event_explored(possible_event, state):
return False
self.explored_state_strs.add(state.state_str)
return True
def is_state_reached(self, state):
if state.state_str in self.reached_state_strs:
return True
self.reached_state_strs.add(state.state_str)
return False
def get_reachable_states(self, current_state):
reachable_states = []
for target_state_str in nx.descendants(self.G, current_state.state_str):
target_state = self.G.nodes[target_state_str]["state"]
reachable_states.append(target_state)
return reachable_states
def get_event_path(self, current_state, target_state):
path_events = []
try:
states = nx.shortest_path(G=self.G, source=current_state.state_str, target=target_state.state_str)
if not isinstance(states, list) or len(states) < 2:
self.logger.warning("Error getting path from %s to %s" %
(current_state.state_str, target_state.state_str))
start_state = states[0]
for state in states[1:]:
edge = self.G[start_state][state]
edge_event_strs = list(edge["events"].keys())
if self.random_input:
random.shuffle(edge_event_strs)
path_events.append(edge["events"][edge_event_strs[0]]["event"])
start_state = state
        except Exception as e:
            self.logger.warning(e)
self.logger.warning("Cannot find a path from %s to %s" % (current_state.state_str, target_state.state_str))
return path_events
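# Illustrative replay sketch (the device/state objects are assumptions from
# the surrounding droidbot layers, not part of this module):
# for input_event in utg.get_event_path(current_state, target_state):
#     input_event.send(device)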
| mit | -1,007,031,879,412,044,400 | 38.075314 | 119 | 0.545347 | false | 3.764208 | false | false | false |
wtsi-hgi/CoGS-Webapp | cogs/common/constants.py | 1 | 2559 | """
Copyright (c) 2017 Genome Research Ltd.
Authors:
* Christopher Harrison <[email protected]>
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import os.path
from enum import Enum
from typing import List
# Standard permissions
PERMISSIONS:List[str] = [
"modify_permissions", # Can modify permissions
"create_project_groups", # Can create rotations
"set_readonly", # Can set project groups read-only
"create_projects", # Can create projects
"review_other_projects", # Can review other projects
"join_projects", # Can join projects
"view_projects_predeadline", # Can view projects before they're visible to students
"view_all_submitted_projects" # Can view all submitted projects
]
# Rotation e-mail invitation template IDs, for students and supervisors
ROTATION_TEMPLATE_IDS:List[str] = [
"student_invite_1", # Student invite for rotation 1
"student_invite_2", # ...for rotation 2
"student_invite_3", # ...for rotation 3
"supervisor_invite_1", # Supervisor invite for rotation 1
"supervisor_invite_2", # ...for rotation 2
"supervisor_invite_3" # ...for -- wait for it! -- rotation 3
]
# Absolute path of the job hazard form
# FIXME? Is this the appropriate place to put this?
JOB_HAZARD_FORM:str = os.path.normpath(
os.path.join(
os.path.dirname(__file__),
"..", "..", "static", "new_starter_health_questionnaire_jun_17.docx"))
# Maximum number of Excel rows to export
# FIXME? Is this the appropriate place to put this?
MAX_EXPORT_LINE_LENGTH:int = 30
# Sanger science programmes
PROGRAMMES:List[str] = [
"Cancer, Ageing and Somatic Mutation",
"Cellular Genetics",
"Human Genetics",
"Parasites and Microbes"
]
# Grades used in marking, with description
class GRADES(Enum):
A = "Excellent"
B = "Good"
C = "Satisfactory"
D = "Fail"
| agpl-3.0 | 103,027,100,164,038,820 | 35.042254 | 89 | 0.682298 | false | 3.763235 | false | false | false |
jrising/research-common | python/ripping.py | 1 | 1455 | import os, urllib2, csv
from bs4 import BeautifulSoup
from bs4.element import Tag
def get_soup(urlreq):
if type(urlreq) is str:
print urlreq
# Open the URL
reader = urllib2.urlopen(urlreq, timeout=30)
# Parse the HTML
return BeautifulSoup(reader, "lxml")
# Find all elements of a given kind of tag in the soup
def find_tags(soup, tagname, recurse_match=False):
"""Find all tags in the soup that have the given tag name (e.g., all <a> tags for links)."""
matches = [] # all tags found
check_tag = lambda name: name == tagname
if isinstance(tagname, list):
check_tag = lambda name: name in tagname
# Iterate through all children that are instances of the Tag class
for child in soup.children:
if isinstance(child, Tag):
# If this is our tag, add it; otherwise, recurse!
if hasattr(child, 'name') and check_tag(child.name):
matches.append(child)
if recurse_match:
matches.extend(find_tags(child, tagname, recurse_match=recurse_match))
else:
matches.extend(find_tags(child, tagname, recurse_match=recurse_match))
# Return the tags
return matches
# Extract tags with a given class
def find_tags_class(soup, tagname, clsname):
tags = find_tags(soup, tagname, recurse_match=True)
return [tag for tag in tags if tag.has_attr('class') and tag['class'][0] == clsname]
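# Example (hypothetical page; grab all <a> tags carrying class "external"):
# soup = get_soup('http://example.com')
# links = find_tags_class(soup, 'a', 'external')
# hrefs = [tag['href'] for tag in links if tag.has_attr('href')]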
| mit | -7,632,673,079,982,886,000 | 33.642857 | 96 | 0.646735 | false | 3.900804 | false | false | false |
avalentino/gsdview | gsdview/gdalbackend/gdalexectools.py | 1 | 14797 | # GSDView - Geo-Spatial Data Viewer
# Copyright (C) 2008-2021 Antonio Valentino <[email protected]>
#
# This module is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation either version 2 of the License, or
# (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this module; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Custom exectools components for GDAL."""
import re
import logging
import exectools
from exectools.qt import QtOutputHandler
from osgeo import gdal
_log = logging.getLogger(__name__)
class BaseGdalToolDescriptor(exectools.ToolDescriptor):
"""Base class for GDAL tool descriptors."""
def gdal_config_options(self, cmd=''):
extra_args = []
if 'GDAL_CACHEMAX' not in cmd:
value = gdal.GetCacheMax()
extra_args.extend(('--config', 'GDAL_CACHEMAX', str(value)))
for key in ('CPL_DEBUG', 'GDAL_SKIP', 'GDAL_DATA',
'GDAL_DRIVER_PATH', 'OGR_DRIVER_PATH'):
if key not in cmd:
value = gdal.GetConfigOption(key, None)
if value:
extra_args.extend(('--config', key, '"%s"' % value))
return extra_args
def cmdline(self, *args, **kwargs):
parts = super().cmdline(*args, **kwargs)
extra_args = self.gdal_config_options(parts)
if extra_args:
if not self.executable or isinstance(self.executable, str):
parts = [parts[0]] + extra_args + parts[1:]
else:
executable = list(self.executable)
parts = executable + extra_args + parts[len(executable):]
return parts
class GdalAddOverviewDescriptor(BaseGdalToolDescriptor):
"""Tool descriptor for the gdaladdo utility program."""
#: resampling methods
RESAMPLING_METHODS = [
'nearest',
'average',
'gauss',
'cubic',
# 'cubicspline', # GDAL 2.0
# 'lanczos', # GDAL 2.0
'average_mp',
'average_magphase',
'mode',
]
if gdal.VersionInfo() > '2000000':
RESAMPLING_METHODS.extend((
'cubicspline',
'lanczos',
))
#: TIFF compression methods
TIFF_COMPRESSION_METHODS = (
'JPEG',
'LZW',
'PACKBITS',
'DEFLATE',
)
#: TIFF interleaving methods
TIFF_INTERLEAVING_METHODS = ('PIXEL', 'BAND')
#: Allowed options for BigTIFF flag
TIFF_USE_BIGTIFF_MODE = ('IF_NEEDED', 'IF_SAFER', 'YES', 'NO')
def __init__(self, cwd=None, env=None,
stdout_handler=None, stderr_handler=None):
"""Initialization:
:param cwd:
program working directory
:param env:
environment dictionary
:param envmerge:
            if set to True (default) the :attr:`env` dictionary is
            used to update the system environment
:param stdout_handler:
*OutputHandler* for the stdout of the tool
:param stderr_handler:
*OutputHandler* for the stderr of the tool
.. seealso:: :class:`exectools.BaseOutputHandler`
"""
super().__init__('gdaladdo', [], cwd, env, stdout_handler,
stderr_handler)
#: ensure that gdaladdo works in readonly mode
self.readonly = False
self._resampling_method = 'average'
#: use Erdas Imagine format (.aux) as overview format.
#: If None use GDAL defaults.
self.use_rrd = None
#: photometric interpretation: RGB, YCBCR, etc. (only for external
#: overviews in GeoTIFF format).
#: If None use GDAL defaults.
self.photometric_interpretation = None
self._compression_method = None
self._interleaving_method = None
self._use_bigtiff_mode = None
def resampling_method(self):
"""Resampling method for overviews computation."""
return self._resampling_method
def set_resampling_method(self, method):
        """Set the resampling method for overviews computation.
        If set to None use GDAL defaults.
        Available resampling methods are the ones listed in
        :attr:`RESAMPLING_METHODS`.
        """
if method is not None and method not in self.RESAMPLING_METHODS:
raise ValueError(
'invalid resampling method: "%s". '
'Available methods are: %s' % (
method, ', '.join(self.RESAMPLING_METHODS)))
self._resampling_method = method
def compression_method(self):
"""TIFF compression method.
This attribute is only used if external overviews are
stored in GeoTIFF format.
"""
return self._compression_method
def set_compression_method(self, method):
        """Set the TIFF compression method.
        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Available compression methods are the ones listed in
        :attr:`TIFF_COMPRESSION_METHODS`.
        """
self._compression_method = method
def interleaving_method(self):
        """Overviews interleaving method (PIXEL or BAND).
        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        """
return self._interleaving_method
def set_interleaving_method(self, method):
        """Set the overview interleaving method.
        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Possible interleaving methods are PIXEL or BAND.
        """
self._interleaving_method = method
def use_bigtiff_mode(self):
        """Mode of using BigTIFF in overviews (IF_NEEDED, IF_SAFER, YES or NO).
        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        """
return self._use_bigtiff_mode
def set_use_bigtiff_mode(self, mode):
        """Set the mode of using BigTIFF in overviews.
        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Possible BigTIFF modes are IF_NEEDED, IF_SAFER, YES or NO.
        """
self._use_bigtiff_mode = mode
def gdal_config_options(self, cmd=''):
extra_args = super().gdal_config_options(cmd)
if self.use_rrd is not None and 'USE_RRD' not in cmd:
if self.use_rrd:
value = 'YES'
else:
value = 'NO'
extra_args.extend(('--config', 'USE_RRD', value))
if (self.photometric_interpretation is not None
and 'PHOTOMETRIC_OVERVIEW' not in cmd):
extra_args.extend(('--config', 'PHOTOMETRIC_OVERVIEW',
self.photometric_interpretation))
if (self._compression_method is not None
and 'COMPRESS_OVERVIEW' not in cmd):
extra_args.extend(('--config', 'COMPRESS_OVERVIEW',
self._compression_method))
if (self._interleaving_method is not None
and 'INTERLEAVE_OVERVIEW' not in cmd):
extra_args.extend(('--config', 'INTERLEAVE_OVERVIEW',
self._interleaving_method))
if (self._use_bigtiff_mode is not None
and 'BIGTIFF_OVERVIEW' not in cmd):
extra_args.extend(('--config', 'BIGTIFF_OVERVIEW',
self._use_bigtiff_mode))
return extra_args
def cmdline(self, *args, **kwargs):
args = list(args)
if self._resampling_method is not None and '-r' not in args:
args = ['-r', self._resampling_method] + args
if self.readonly and '-ro' not in args:
args.append('-ro')
return super().cmdline(*args, **kwargs)
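# Illustrative use of the descriptor above (a sketch: assumes the gdaladdo
# binary and the exectools output handlers are available):
#   descr = GdalAddOverviewDescriptor()
#   descr.set_resampling_method('gauss')
#   descr.set_compression_method('DEFLATE')
#   descr.cmdline('input.tif', '2', '4', '8')  # -> full gdaladdo command line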
class GdalInfoDescriptor(BaseGdalToolDescriptor):
"""Tool descriptor for the gdalinfo utility program."""
def __init__(self, cwd=None, env=None,
stdout_handler=None, stderr_handler=None):
"""
:param cwd:
program working directory
:param env:
environment dictionary
:param envmerge:
            if set to True (default) the :attr:`env` dictionary is
            used to update the system environment
:param stdout_handler:
*OutputHandler* for the stdout of the tool
:param stderr_handler:
*OutputHandler* for the stderr of the tool
.. seealso:: :class:`exectools.BaseOutputHandler`
"""
super().__init__('gdalinfo', [], cwd, env,
stdout_handler, stderr_handler)
#: force computation of the actual min/max values for each band in the
#: dataset.
self.mm = False
#: read and display image statistics. Force computation if no
#: statistics are stored in an image.
self.stats = False
#: report histogram information for all bands.
self.hist = False
#: suppress ground control points list printing. It may be useful for
#: datasets with huge amount of GCPs, such as L1B AVHRR or HDF4 MODIS
#: which contain thousands of the ones.
self.nogcp = False
#: suppress metadata printing. Some datasets may contain a lot of
#: metadata strings.
self.nomd = False
#: suppress raster attribute table printing.
self.norat = False
#: suppress printing of color table.
self.noct = False
#: force computation of the checksum for each band in the dataset.
self.checksum = False
#: report metadata for the specified domain.
self.mdd = None
def cmdline(self, *args, **kwargs):
extra_args = []
for name in ('mm', 'stats', 'hist', 'nogcp', 'nomd', 'norat',
'noct', 'checksum',):
flag = '-%s' % name
if getattr(self, name) and flag not in args:
extra_args.append(flag)
if self.mdd is not None and '-mdd' not in args:
extra_args.extend(('-mdd', self.mdd))
args = extra_args + list(args)
return super().cmdline(*args, **kwargs)
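# Sketch: request statistics and per-band checksums from gdalinfo (assumed
# workflow, mirroring the flags documented in __init__ above):
#   info = GdalInfoDescriptor()
#   info.stats = True
#   info.checksum = True
#   info.cmdline('input.tif')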
class GdalOutputHandler(QtOutputHandler):
"""Handler for the GDAL simple progress report to terminal.
This progress reporter prints simple progress report to the
terminal window.
The progress report generally looks something like this:
"0...10...20...30...40...50...60...70...80...90...100 - done."
Every 2.5% of progress another number or period is emitted.
.. seealso:: :class:`exectools.BaseOutputHandler`,
:class:`exectools.qt.QtOutputHandler`
"""
def __init__(self, logger=None, statusbar=None, progressbar=None,
blinker=None, **kwargs):
super().__init__(logger, statusbar, progressbar, blinker, **kwargs)
# pattern = (r'(?P<percentage>\d{1,3})|(?P<pulse>\.)|'
# r'((?P<text> - done\.?)$)')
pattern = (r'(?P<percentage>\d{1,3})|(?P<pulse>\.)|'
r'( - (?P<text>done\.?)\n)')
self._progress_pattern = re.compile(pattern)
self._percentage = 0. # @TODO: remove. Set the progressbar maximum
# to 1000 instead.
def handle_progress(self, data):
"""Handle progress data.
:param data:
a list containing an item for each named group in the
"progress" regular expression: (pulse, percentage, text)
for the default implementation.
Each item can be None.
"""
pulse = data.get('pulse')
percentage = data.get('percentage')
# text = data.get('text')
if pulse and percentage is None:
self._percentage = min(100, self._percentage + 2.5)
data['percentage'] = self._percentage
if percentage is not None:
if percentage < self._percentage:
_log.debug(
'new percentage (%d) is lower than previous one (%f)',
percentage, self._percentage)
self._percentage = percentage
# if text and not pulse and percentage is None:
# # reset percentage
# self._percentage = 0.
super().handle_progress(data)
def reset(self):
super().reset()
self._percentage = 0.
if __name__ == '__main__':
def test_GdalOutputHandler_re():
s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'
h = exectools.BaseOutputHandler(exectools.OFStream())
h._progress_pattern = GdalOutputHandler()._progress_pattern
h.feed(s)
h.close()
print('done.')
def test_GdalOutputHandler1():
s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'
class C(GdalOutputHandler):
def __init__(self):
exectools.BaseOutputHandler.__init__(self,
exectools.OFStream())
def feed(self, data):
return exectools.BaseOutputHandler.feed(self, data)
def close(self):
return exectools.BaseOutputHandler.close(self)
def reset(self):
return exectools.BaseOutputHandler.reset(self)
def handle_progress(self, data):
return exectools.BaseOutputHandler.handle_progress(self, data)
h = C()
h.feed(s)
h.close()
print('done.')
def test_GdalOutputHandler2():
s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'
h = exectools.BaseOutputHandler(exectools.OFStream())
h._progress_pattern = GdalOutputHandler()._progress_pattern
for c in s:
h.feed(c)
h.close()
# test_GdalOutputHandler_re()
# test_GdalOutputHandler1()
test_GdalOutputHandler2()
| gpl-2.0 | -403,949,074,283,058,700 | 31.167391 | 78 | 0.582753 | false | 4.157629 | true | false | false |
ganga-devs/ganga | ganga/GangaCore/Lib/Checkers/RootFileChecker.py | 1 | 6863 | ##########################################################################
# Ganga Project. http://cern.ch/ganga
#
##########################################################################
from GangaCore.GPIDev.Adapters.IPostProcessor import PostProcessException
from GangaCore.GPIDev.Adapters.IChecker import IFileChecker
from GangaCore.GPIDev.Schema import FileItem, SimpleItem
from GangaCore.Utility.logging import getLogger
import subprocess
import copy
import os
import re
logger = getLogger()
def SortedValues(adict):
items = sorted(adict.items())
return [value for key, value in items]
def GetKeyNames(f, dir=""):
import ROOT
f.cd(dir)
return [key.GetName() for key in ROOT.gDirectory.GetListOfKeys()]
def GetTreeObjects(f, dir=""):
import ROOT
tree_dict = {}
for tdir in GetKeyNames(f, dir):
if tdir == "":
continue
absdir = os.path.join(dir, tdir)
if isinstance(f.Get(tdir), ROOT.TDirectory):
for absdir, tree in GetTreeObjects(f, absdir).items():
tree_dict[absdir] = tree
if isinstance(f.Get(absdir), ROOT.TTree):
tree_dict[absdir] = f.Get(absdir)
return tree_dict
class RootFileChecker(IFileChecker):
"""
Checks ROOT files to see if they are zombies.
For master job, also checks to see if merging performed correctly.
self.files are the files you would like to check.
self.fileMustExist toggles whether to fail the job if the specified file doesn't exist (default is True).
"""
_schema = IFileChecker._schema.inherit_copy()
_schema.datadict['checkMerge'] = SimpleItem(
defvalue=True, doc='Toggle whether to check the merging proceedure')
_category = 'postprocessor'
_name = 'RootFileChecker'
_exportmethods = ['check']
def checkBranches(self, mastertrees, subtrees):
import ROOT
for masterpath, mastertree in mastertrees.items():
for subpath, subtree in subtrees.items():
if (subpath == masterpath):
subbranches = [branch.GetName()
for branch in subtree.GetListOfBranches()]
masterbranches = [branch.GetName()
for branch in mastertree.GetListOfBranches()]
if (subbranches != masterbranches):
return self.failure
return self.success
def addEntries(self, mastertrees, subtrees, entries_dict):
import ROOT
for masterpath, mastertree in mastertrees.items():
for subpath, subtree in subtrees.items():
if (subpath == masterpath):
if (subpath in entries_dict):
entries_dict[subpath] += subtree.GetEntries()
else:
entries_dict[subpath] = subtree.GetEntries()
return entries_dict
def checkMergeable(self, f):
import ROOT
tf = ROOT.TFile.Open(f)
if tf.IsZombie():
logger.info('ROOT file %s is a zombie, failing job', f)
tf.Close()
return self.failure
if not len(GetKeyNames(tf)):
logger.info('ROOT file %s has no keys, failing job', f)
tf.Close()
return self.failure
tf.Close()
if (os.path.getsize(f) < 330):
logger.info('ROOT file %s has no size, failing job', f)
return self.failure
return self.success
def check(self, job):
"""
Check that ROOT files are not zombies and were closed properly, also (for master job only) checks that the merging performed correctly.
"""
import ROOT
self.result = True
filepaths = self.findFiles(job)
if self.result is False:
return self.failure
if not len(filepaths):
raise PostProcessException(
'None of the files to check exist, RootFileChecker will do nothing!')
for f in filepaths:
if f.find('.root') < 0:
raise PostProcessException('The file "%s" is not a ROOT file, RootFileChecker will do nothing!' % os.path.basename(f))
if not self.checkMergeable(f):
return self.failure
if (len(job.subjobs) and self.checkMerge):
haddoutput = f + '.hadd_output'
if not os.path.exists(haddoutput):
logger.warning('Hadd output file %s does not exist, cannot perform check on merging.', haddoutput)
return self.success
for failString in ['Could not find branch', 'One of the export branches', 'Skipped file']:
grepoutput = subprocess.getoutput('grep "%s" %s' % (failString, haddoutput))
if len(grepoutput):
logger.info('There was a problem with hadd, the string "%s" was found. Will fail job', failString)
return self.failure
tf = ROOT.TFile.Open(f)
mastertrees = GetTreeObjects(tf)
entries_dict = {}
for sj in job.subjobs:
if (sj.status == 'completed'):
for subfile in self.findFiles(sj):
if (os.path.basename(subfile) == os.path.basename(f)):
subtf = ROOT.TFile.Open(subfile)
subtrees = GetTreeObjects(subtf)
substructure = sorted(subtrees.keys())
masterstructure = sorted(mastertrees.keys())
if (substructure != masterstructure):
logger.info('File structure of subjob %s is not the same as master job, failing job', sj.fqid)
return self.failure
if not self.checkBranches(mastertrees, subtrees):
logger.info('The tree structure of subjob %s is not the same as merged tree, failing job', sj.fqid)
return self.failure
entries_dict = self.addEntries(
mastertrees, subtrees, entries_dict)
subtf.Close()
master_entries_dict = dict(
(n, mastertrees[n].GetEntries()) for n in set(mastertrees))
if (SortedValues(entries_dict) != SortedValues(master_entries_dict)):
logger.info(
'Sum of subjob tree entries is not the same as merged tree entries for file %s, failing job (check hadd output)', os.path.basename(f))
return self.failure
tf.Close()
return self.result
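# Hedged usage sketch (assumes an interactive Ganga session; `files` and
# `checkMerge` are the schema items documented above):
#   rfc = RootFileChecker(files=['*.root'], checkMerge=True)
#   rfc.check(some_completed_job)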
| gpl-2.0 | -378,378,038,954,568,200 | 41.364198 | 158 | 0.547137 | false | 4.524061 | false | false | false |
guillochon/FriendlyFit | mosfit/modules/seds/blackbody_cutoff.py | 5 | 4737 | """Definitions for the `BlackbodyCutoff` class."""
from math import pi
import numexpr as ne
import numpy as np
from astrocats.catalog.source import SOURCE
from astropy import constants as c
from astropy import units as u
from mosfit.constants import ANG_CGS, FOUR_PI
from mosfit.modules.seds.sed import SED
# Important: Only define one ``Module`` class per file.
class BlackbodyCutoff(SED):
"""Blackbody SED with cutoff.
Blackbody spectral energy dist. for given temperature and radius,
with a linear absorption function bluewards of a cutoff wavelength.
"""
_REFERENCES = [
{SOURCE.BIBCODE: '2017arXiv170600825N'}
]
C_CONST = c.c.cgs.value
FLUX_CONST = FOUR_PI * (
2.0 * c.h * c.c ** 2 * pi).cgs.value * u.Angstrom.cgs.scale
X_CONST = (c.h * c.c / c.k_B).cgs.value
STEF_CONST = (4.0 * pi * c.sigma_sb).cgs.value
F_TERMS = 10
def __init__(self, **kwargs):
"""Initialize module."""
super(BlackbodyCutoff, self).__init__(**kwargs)
self._nxcs = self.X_CONST * np.array(range(1, self.F_TERMS + 1))
def process(self, **kwargs):
"""Process module."""
kwargs = self.prepare_input(self.key('luminosities'), **kwargs)
self._luminosities = kwargs[self.key('luminosities')]
self._bands = kwargs['all_bands']
self._band_indices = kwargs['all_band_indices']
self._frequencies = kwargs['all_frequencies']
self._radius_phot = np.array(kwargs[self.key('radiusphot')])
self._temperature_phot = np.array(kwargs[self.key('temperaturephot')])
self._cutoff_wavelength = kwargs[self.key('cutoff_wavelength')]
self._times = np.array(kwargs['rest_times'])
xc = self.X_CONST # noqa: F841
fc = self.FLUX_CONST
cc = self.C_CONST
ac = ANG_CGS
cwave_ac = self._cutoff_wavelength * ac
cwave_ac2 = cwave_ac * cwave_ac
cwave_ac3 = cwave_ac2 * cwave_ac # noqa: F841
zp1 = 1.0 + kwargs[self.key('redshift')]
lt = len(self._times)
seds = np.empty(lt, dtype=object)
rp2 = self._radius_phot ** 2
tp = self._temperature_phot
evaled = False
for li, lum in enumerate(self._luminosities):
bi = self._band_indices[li]
# tpi = tp[li]
# rp2i = rp2[li]
if lum == 0.0:
seds[li] = np.zeros(
len(self._sample_wavelengths[bi]) if bi >= 0 else 1)
continue
if bi >= 0:
rest_wavs = self._sample_wavelengths[bi] * ac / zp1
else:
rest_wavs = np.array([cc / (self._frequencies[li] * zp1)])
# Apply absorption to SED only bluewards of cutoff wavelength
ab = rest_wavs < cwave_ac # noqa: F841
tpi = tp[li] # noqa: F841
rp2i = rp2[li] # noqa: F841
if not evaled:
# Absorbed blackbody: 0% transmission at 0 Angstroms 100% at
# >3000 Angstroms.
sed = ne.evaluate(
"where(ab, fc * (rp2i / cwave_ac / "
"rest_wavs ** 4) / expm1(xc / rest_wavs / tpi), "
"fc * (rp2i / rest_wavs ** 5) / "
"expm1(xc / rest_wavs / tpi))"
)
evaled = True
else:
sed = ne.re_evaluate()
sed[np.isnan(sed)] = 0.0
seds[li] = sed
uniq_times = np.unique(self._times)
tsort = np.argsort(self._times)
uniq_is = np.searchsorted(self._times, uniq_times, sorter=tsort)
lu = len(uniq_times)
norms = self._luminosities[
uniq_is] / (fc / ac * rp2[uniq_is] * tp[uniq_is])
rp2 = rp2[uniq_is].reshape(lu, 1)
tp = tp[uniq_is].reshape(lu, 1)
tp2 = tp * tp
tp3 = tp2 * tp # noqa: F841
nxcs = self._nxcs # noqa: F841
f_blue_reds = ne.evaluate(
"sum((exp(-nxcs / (cwave_ac * tp)) * ("
"nxcs ** 2 + 2 * ("
"nxcs * cwave_ac * tp + cwave_ac2 * tp2)) / ("
"nxcs ** 3 * cwave_ac3)) + "
"(6 * tp3 - exp(-nxcs / (cwave_ac * tp)) * ("
"nxcs ** 3 + 3 * nxcs ** 2 * cwave_ac * tp + 6 * ("
"nxcs * cwave_ac2 * tp2 + cwave_ac3 *"
"tp3)) / cwave_ac3) / (nxcs ** 4), 1)"
)
norms /= f_blue_reds
# Apply renormalisation
seds *= norms[np.searchsorted(uniq_times, self._times)]
seds = self.add_to_existing_seds(seds, **kwargs)
# Units of `seds` is ergs / s / Angstrom.
return {'sample_wavelengths': self._sample_wavelengths,
self.key('seds'): seds}
| mit | 1,407,704,762,395,699,200 | 34.088889 | 78 | 0.528605 | false | 3.189899 | false | false | false |
TissueMAPS/TmDeploy | tmdeploy/log.py | 2 | 2888 | # TmDeploy - Automated deployment of TissueMAPS in the cloud.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
#: dict[int, int]: Mapping of logging verbosity to logging level
VERBOSITY_TO_LEVELS = {
0: logging.NOTSET, # Nothing gets logged
1: logging.WARN, # For simplicity. Includes ERROR, CRITICAL
2: logging.INFO,
3: logging.DEBUG,
}
#: dict[int, int]: Mapping of logging level to logging verbosity
LEVELS_TO_VERBOSITY = {
logging.NOTSET: 0,
logging.WARN: 1,
logging.ERROR: 1,
logging.CRITICAL: 1,
logging.INFO: 2,
logging.DEBUG: 3,
}
def map_logging_verbosity(verbosity):
'''Maps logging verbosity to level expected by `logging` module.
Parameters
----------
verbosity: int
logging verbosity
Returns
-------
int
logging level
Raises
------
TypeError
when `verbosity` doesn't have type int
ValueError
when `verbosity` is negative
'''
if not isinstance(verbosity, int):
raise TypeError('Argument "verbosity" must have type int.')
if not verbosity >= 0:
raise ValueError('Argument "verbosity" must be a positive number.')
if verbosity >= len(VERBOSITY_TO_LEVELS):
verbosity = len(VERBOSITY_TO_LEVELS) - 1
return VERBOSITY_TO_LEVELS[verbosity]
def configure_logging(level=logging.DEBUG):
'''Configures the root logger for command line applications.
A stream handler will be added to the logger that directs
messages to the standard error stream.
By default, *no* messages will be filtered out: set a higher
level on derived/child loggers to achieve filtering.
Warning
-------
Logging should only be configured once at the main entry point of the
application!
'''
fmt = '%(asctime)s | %(levelname)-8s | %(name)-40s | %(message)s'
datefmt = '%Y-%m-%d %H:%M:%S'
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
logger = logging.getLogger() # returns the root logger
stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.name = 'err'
stderr_handler.setLevel(level)
stderr_handler.setFormatter(formatter)
logger.addHandler(stderr_handler)
| gpl-3.0 | 4,283,974,910,290,029,000 | 30.391304 | 75 | 0.690443 | false | 3.961591 | false | false | false |
rollos/PartyPlay | PartyPlay/views.py | 1 | 7664 | import json
from django.core import serializers
from django.forms import model_to_dict
from django.http import HttpResponseRedirect
from django.template.loader import render_to_string
from datetime import date, timedelta
from .helper_functions import *
from django.contrib import auth
from django.db.models import Count
from django.http import HttpResponse
from django.http import HttpResponseNotAllowed
from django.http import JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
# Create your views here.
from django.template import RequestContext
from django.views import generic
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.edit import FormMixin, CreateView
from PartyPlay.forms import UploadVideoForm
from PartyPlay.models import Room, Video
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
class RoomModelListView(generic.ListView):
model = Room
context_object_name = 'room_list'
template_name = 'partyplay/roommodel_list.html'
def get_context_data(self, **kwargs):
context = super(RoomModelListView, self).get_context_data(**kwargs)
favorite_room_list = []
for room in context['room_list'].all():
if self.request.user in room.favorite_users.all():
favorite_room_list.append(room)
context['favorite_room_list'] = favorite_room_list
return context
class RoomModelDetailView(generic.DetailView):
model = Room
context_object_name = 'room_data'
template_name = 'partyplay/roommodel_detail.html'
upload_form = UploadVideoForm
def get_context_data(self, **kwargs):
# Call the base implementation to get the original context
context = super(RoomModelDetailView, self).get_context_data(**kwargs)
# Get the songs currently in the room
top_songs = get_ordered_videos(self.object)
context['queue'] = top_songs
if self.object.next_time and self.object.next_time < timezone.now():
update_currently_playing(self.object)
context['current_video'] = self.object.current_video
context['start_time'] = get_start_time(self.object)
if auth.user_logged_in:
upvoted = []
for video in top_songs:
if video.voters.filter(pk=self.request.user.pk).exists():
upvoted.append(video)
context['upvoted'] = upvoted
context['upload_form'] = self.upload_form
return context
@login_required
@require_http_methods(["POST"])
def add_video(request, pk):
room = Room.objects.get(pk=pk)
vid_id = request.POST.get('video_id')
duration = request.POST.get('duration')
title = request.POST.get('title')
video = Video()
video.uploader = auth.get_user(request)
video.title = title
video.duration = timedelta(seconds=int(duration))
video.room = room
video.videoID = vid_id
video.save()
video.voters.add(request.user)
video.save()
return render_current_queue(request, video.room)
@login_required
@require_http_methods(["POST"])
def favorite_room(request, pk):
room = Room.objects.get(pk=pk)
if request.user in room.favorite_users.all():
room.favorite_users.remove(request.user)
else:
room.favorite_users.add(request.user)
return HttpResponse()
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import render, redirect
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
return redirect('rooms')
else:
form = UserCreationForm()
return render(request, 'registration/signup.html', {'form': form})
def get_queue(request, pk):
room = Room.objects.get(pk=pk)
return render_current_queue(request, room)
@require_http_methods(["POST"])
def video_end(request, pk):
room = Room.objects.get(pk=pk)
#Finished video pk
str_val = request.POST.get("vid_pk")
#If there is no video on frontend
if (str_val == '' or str_val == None):
#If there is not a current video
if room.current_video is None:
new_pk = update_currently_playing(room)
else:
new_pk = room.current_video.pk
#Thereis not a video playing
else:
vid_pk = int(str_val)
#
#find the first user to finish their video
#This user's request will update current_video
#Any subsequent requests will have different pk's than current_video,
# they will only receive the updated data
if room.current_video is None:
new_pk = None
elif room.current_video.pk == vid_pk:
new_pk = update_currently_playing(room)
else:
new_pk = room.current_video.pk
if room.current_video:
current_vid = room.current_video.videoID
current_vid_name = room.current_video.title
uploader = room.current_video.uploader.username
else:
current_vid = None
current_vid_name = None
uploader = None
t_u_n = get_time_until_next(room)
videos = get_ordered_videos(room)
upvotes = []
for video in videos:
if request.user in video.voters.all():
upvotes.append(video.pk)
context = {
'current_video': room.current_video,
'time_until_next': get_time_until_next(room),
'queue': get_ordered_videos(room),
'upvotes': upvotes
}
response_data = {
'html': render_to_string('partyplay/video_and_queue.html', context=context, request=request),
'time_until_next': t_u_n,
'current_vid_pk': new_pk,
'current_vid_id': current_vid,
'current_vid_name': current_vid_name,
'current_uploader': uploader,
'start_time': get_start_time(room)
}
data = json.dumps(response_data)
return HttpResponse(data, content_type='application.json')
@login_required
def upvote(request):
context = RequestContext(request)
pk = request.GET['vid_pk']
video = get_object_or_404(Video, pk=pk)
user = auth.get_user(request)
if user not in video.voters.all():
video.voters.add(user)
else:
video.voters.remove(user)
video.save()
return render_current_queue(request, video.room)
class UserProfilePage(LoginRequiredMixin, generic.ListView):
model = Video
template_name = 'partyplay/userprofile.html'
def get_context_data(self, **kwargs):
context = super(UserProfilePage, self).get_context_data(**kwargs)
context['uploaded_videos'] = Video.objects.filter(uploader=self.request.user)
context['created_rooms'] = Room.objects.filter(creator=self.request.user).all()
return context
class RoomCreate(CreateView):
model = Room
fields = ['name', 'public']
template_name = 'partyplay/addroom.html'
def form_valid(self, form):
form.instance.creator = self.request.user
form.instance.url = form.cleaned_data['name'].lower().replace(" ","")
form.save()
return super(RoomCreate, self).form_valid(form)
| mit | 6,248,876,530,328,610,000 | 24.045752 | 101 | 0.658272 | false | 3.790307 | false | false | false |
anastue/netforce | netforce_product/netforce_product/models/product_custom_option.py | 4 | 1899 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class CustomOption(Model):
_name = "product.custom.option"
_string = "Custom Option"
_key = ["code"]
_fields = {
"name": fields.Char("Name", required=True, search=True, translate=True),
"seq": fields.Char("Sequence", required=True),
"code": fields.Char("Code", search=True),
"type": fields.Selection([["text", "Text"], ["selection", "Selection"]], "Type", required=True),
"required": fields.Boolean("Required"),
"description": fields.Text("Description"),
"price": fields.Decimal("Price"),
"values": fields.One2Many("product.custom.option.value", "cust_opt_id", "Values"),
}
_defaults = {
"type": "text",
"seq": '0',
}
CustomOption.register()
| mit | 6,331,416,349,369,496,000 | 43.162791 | 104 | 0.697736 | false | 4.11039 | false | false | false |
petertodd/python-opentimestamps | opentimestamps/tests/core/test_git.py | 2 | 6918 | # Copyright (C) 2016 The OpenTimestamps developers
#
# This file is part of python-opentimestamps.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-opentimestamps including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.
import unittest
import dbm
import git
import tempfile
from bitcoin.core import b2x
from opentimestamps.core.timestamp import *
from opentimestamps.core.op import *
from opentimestamps.core.git import *
class Test_GitTreeTimestamper(unittest.TestCase):
def setUp(self):
self.db_dirs = []
def tearDown(self):
for d in self.db_dirs:
d.cleanup()
del self.db_dirs
def make_stamper(self, commit):
# Yes, we're using our own git repo as the test data!
repo = git.Repo(__file__ + '../../../../../')
db_dir = tempfile.TemporaryDirectory()
self.db_dirs.append(db_dir)
db = dbm.open(db_dir.name + '/db', 'c')
tree = repo.commit(commit).tree
return GitTreeTimestamper(tree, db=db)
def test_blobs(self):
"""Git blob hashing"""
stamper = self.make_stamper("53c68bc976c581636b84c82fe814fab178adf8a6")
for expected_hexdigest, path in (('9e34b52cfa5724a4d87e9f7f47e2699c14d918285a20bf47f5a2a7345999e543', 'LICENSE'),
('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/__init__.py'),
('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/tests/__init__.py'),
('745bd9059cf01edabe3a61198fe1147e01ff57eec69e29f2e617b8e376427082', 'opentimestamps/tests/core/test_core.py'),
('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/tests/core/__init__.py'),
('7cd2b5a8723814be27fe6b224cc76e52275b1ff149de157ce374d290d032e875', 'opentimestamps/core/__init__.py'),
('d41fb0337e687b26f3f5dd61d10ec5080ff0bdc32f90f2022f7e2d9eeba91442', 'README')):
stamp = stamper[path]
actual_hexdigest = b2x(stamp.file_digest)
self.assertEqual(expected_hexdigest, actual_hexdigest)
stamper = self.make_stamper("30f6c357d578e0921dc6fffd67e2af1ce1ca0ff2")
empty_stamp = stamper["empty"]
self.assertEqual(empty_stamp.file_digest, bytes.fromhex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
def test_empty_tree(self):
"""Git tree with a single empty file"""
stamper = self.make_stamper("30f6c357d578e0921dc6fffd67e2af1ce1ca0ff2")
# There's a single empty file in this directory. Thus the nonce_key is:
nonce_key = OpSHA256()(OpSHA256()(b'') + # one empty file
b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
nonce1 = OpSHA256()(OpSHA256()(b'') + nonce_key)
assert nonce1[0] & 0b1 == 1
nonce2 = OpSHA256()(nonce1)
self.assertEqual(stamper.timestamp.msg,
OpSHA256()(b''))
self.assertEqual(stamper.timestamp.msg, b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o\xb9$'\xaeA\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U")
def test_two_file_tree(self):
"""Git tree with a two files"""
stamper = self.make_stamper("78eb5cdc1ec638be72d6fb7a38c4d24f2be5d081")
nonce_key = OpSHA256()(OpSHA256()(b'a\n') +
OpSHA256()(b'b\n') +
b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
n_a_nonce1 = OpSHA256()(OpSHA256()(b'a\n') + nonce_key)
assert n_a_nonce1[0] & 0b1 == 0
n_a_nonce2 = OpSHA256()(n_a_nonce1)
n_a = OpSHA256()(OpSHA256()(b'a\n') + n_a_nonce2)
n_b_nonce1 = OpSHA256()(OpSHA256()(b'b\n') + nonce_key)
assert n_b_nonce1[0] & 0b1 == 0
n_b_nonce2 = OpSHA256()(n_b_nonce1)
n_b = OpSHA256()(OpSHA256()(b'b\n') + n_b_nonce2)
self.assertEqual(stamper.timestamp.msg,
OpSHA256()(n_a + n_b))
self.assertEqual(stamper.timestamp.msg, b's\x0e\xc2h\xd4\xb3\xa5\xd4\xe6\x0e\xe9\xb2t\x89@\x95\xc8c_F3\x81a=\xc2\xd4qy\xaf\x8e\xa0\x87')
def test_tree_with_children(self):
"""Git tree with child trees"""
stamper = self.make_stamper("b22192fffb9aad27eb57986e7fe89f8047340346")
# These correspond to the final values from the test_empty_tree() and
# test_two_file_tree() test cases above; git git commit we're testing
# has the trees associated with those test cases in the one/ and two/
# directories respectively.
d_one = b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o\xb9$'\xaeA\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U"
d_two = b's\x0e\xc2h\xd4\xb3\xa5\xd4\xe6\x0e\xe9\xb2t\x89@\x95\xc8c_F3\x81a=\xc2\xd4qy\xaf\x8e\xa0\x87'
nonce_key = OpSHA256()(d_one + d_two +
b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
n_one_nonce1 = OpSHA256()(d_one + nonce_key)
assert n_one_nonce1[0] & 0b1 == 0
n_one_nonce2 = OpSHA256()(n_one_nonce1)
n_one = OpSHA256()(d_one + n_one_nonce2)
n_two_nonce1 = OpSHA256()(d_two + nonce_key)
assert n_two_nonce1[0] & 0b1 == 0
n_two_nonce2 = OpSHA256()(n_two_nonce1)
n_two = OpSHA256()(d_two + n_two_nonce2)
self.assertEqual(stamper.timestamp.msg,
OpSHA256()(n_one + n_two))
def test_tree_with_prefix_matching_blob(self):
"""Git tree with prefix matching blob"""
stamper = self.make_stamper("75736a2524c624c1a08a574938686f83de5a8a86")
two_a_stamp = stamper['two/a']
def test_submodule(self):
"""Git tree with submodule"""
stamper = self.make_stamper("a3efe73f270866bc8d8f6ce01d22c02f14b21a1a")
self.assertEqual(stamper.timestamp.msg,
OpSHA256()(bytes.fromhex('48b96efa66e2958e955a31a7d9b8f2ac8384b8b9')))
def test_dangling_symlink(self):
"""Git tree with dangling symlink"""
stamper = self.make_stamper("a59620c107a67c4b6323e6e96aed9929d6a89618")
self.assertEqual(stamper.timestamp.msg,
OpSHA256()(b'does-not-exist'))
def test_huge_tree(self):
"""Really big git tree"""
# would cause the OpSHA256 length limits to be exceeded if it were used
# directly
stamper = self.make_stamper("a52fe6e3d4b15057ff41df0509dd302bc5863c29")
self.assertEqual(stamper.timestamp.msg,
b'\x1dW\x9c\xea\x94&`\xc2\xfb\xba \x19Q\x0f\xdb\xf0\x7f\x14\xe3\x14zb\t\xdb\xcf\xf93I\xe9h\xb9\x8d')
| lgpl-3.0 | 1,395,141,965,360,004,000 | 43.346154 | 152 | 0.623735 | false | 2.828291 | true | false | false |
bird-house/pyramid-phoenix | phoenix/tasks/utils.py | 1 | 3035 | import datetime
import json
from phoenix.oauth2 import oauth2_client_factory
from pyramid_celery import celery_app as app
from celery.utils.log import get_task_logger
LOGGER = get_task_logger(__name__)
def task_result(task_id):
return app.AsyncResult(task_id)
def wait_secs(run_step=-1):
secs_list = (2, 2, 2, 2, 2, 5, 5, 5, 5, 5, 10, 10, 10, 10, 10, 20, 20, 20, 20, 20, 30)
if run_step >= len(secs_list):
run_step = -1
return secs_list[run_step]
def dump_json(obj):
def date_handler(obj):
if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date):
date_formatted = obj.isoformat()
else:
date_formatted = None
return date_formatted
return json.dumps(obj, default=date_handler)
def save_log(job, error=None):
if error:
log_msg = 'ERROR: {0.text} - code={0.code} - locator={0.locator}'.format(error)
else:
log_msg = '{0} {1:3d}%: {2}'.format(
job.get('duration', 0),
job.get('progress', 0),
job.get('status_message', 'no message'))
if 'log' not in job:
job['log'] = []
# skip same log messages
if len(job['log']) == 0 or job['log'][-1] != log_msg:
job['log'].append(log_msg)
if error:
LOGGER.error(log_msg)
else:
LOGGER.info(log_msg)
def add_job(db, task_id, process_id, title=None, abstract=None,
service_name=None, service=None, status_location=None,
caption=None, userid=None,
use_async=True):
tags = ['dev']
if use_async:
tags.append('async')
else:
tags.append('sync')
job = dict(
identifier=task_id,
task_id=task_id, # TODO: why not using as identifier?
userid=userid or 'guest',
service_name=service_name, # wps service name (service identifier)
service=service or service_name, # wps service title (url, service_name or service title)
process_id=process_id, # process identifier
title=title or process_id, # process title (identifier or title)
abstract=abstract or "No Summary",
status_location=status_location,
created=datetime.datetime.now(),
tags=tags,
caption=caption,
status="ProcessAccepted",
response=None,
request=None,
)
db.jobs.insert(job)
return job
def get_access_token(userid):
registry = app.conf['PYRAMID_REGISTRY']
# refresh access token
client = oauth2_client_factory(registry)
try:
token = client.refresh_token(userid=userid)
except Exception:
token = None
if token:
return token['access_token']
return None
def wps_headers(userid):
headers = {}
if userid:
access_token = get_access_token(userid)
if access_token:
headers = {'Authorization': 'Bearer {}'.format(access_token)}
LOGGER.debug('wps headers: {}'.format(headers))
return headers
| apache-2.0 | 1,692,529,810,388,217,900 | 28.754902 | 98 | 0.592751 | false | 3.579009 | false | false | false |
lmazuel/azure-sdk-for-python | azure-mgmt-resource/azure/mgmt/resource/resources/v2017_05_10/models/alias_path_type.py | 4 | 1040 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AliasPathType(Model):
"""The type of the paths for alias. .
:param path: The path of an alias.
:type path: str
:param api_versions: The API versions.
:type api_versions: list[str]
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'api_versions': {'key': 'apiVersions', 'type': '[str]'},
}
def __init__(self, path=None, api_versions=None):
super(AliasPathType, self).__init__()
self.path = path
self.api_versions = api_versions
| mit | 3,910,552,997,785,488,000 | 31.5 | 76 | 0.553846 | false | 4.315353 | false | false | false |
carlburch/hydra | server/py_template.py | 1 | 5720 | /*
* Copyright (c) 2014 Carl Burch
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import sys
import json
from io import StringIO
class MyMap():
pass
_g = MyMap()
_g.sys = sys
_g.json = json
_g.StringIO = StringIO
_g.stdout = _g.sys.stdout
del sys
del json
del StringIO
del MyMap
# parameters from database/user
def _g_compile(code, name, isUser=False):
try:
return compile(code, name, 'exec')
except SyntaxError as e:
result = { 'ok': True, 'verdict': 0 if isUser else -2,
'file': name, 'line': e.lineno, 'offset': e.offset }
if isUser:
result['message'] = str(e)
else:
result['message'] = 'Error in {0}: {1}'.format(name, str(e))
_g.json.dump(result, _g.stdout)
_g.sys.exit(0)
_g.userCode = _g_compile("""{{userCode}}""", 'usercode', True)
_g.canSave = """{{usedVars}}""" != ''
if _g.canSave:
_g.saveCode = _g_compile("""_g.savedVars = {{usedVars}}""", 'vars')
_g.restoreCode = _g_compile("""{{usedVars}} = _g.savedVars""", 'vars')
_g.preCode = _g_compile("""{{preCode}}""", 'precode')
_g.solutionCode = _g_compile("""{{solutionCode}}""", 'solution')
_g.postCode = _g_compile("""{{postCode}}""", 'postcode')
del _g_compile
def _g_safeexec(code, name, isUser=False):
try:
exec(code, globals())
return None
except Exception as e:
test = getattr(_g, 'testInput', '???')
if isUser:
msg = '{0}: {1}'.format(type(e).__name__, str(e))
return { 'verdict': 1, 'test': test, 'message': msg }
else:
msg = '{0}: {1}: {2}'.format(name, type(e).__name__, str(e))
return { 'verdict': -1, 'test': test, 'message': msg }
_g.allVerdict = 10
_g.numCorrect = 0;
_g.tests = []
for testIter in range({{numIters}}):
_g.testIter = testIter
_g.result = None
try:
_g.sys.stdout = _g.StringIO()
_g.result = _g_safeexec(_g.preCode, 'precode')
if _g.result is not None:
continue
_g.testInput = _g.sys.stdout.getvalue()
# execute user code first (so it doesn't access solution variables)
if _g.canSave:
_g.result = _g_safeexec(_g.saveCode, 'vars')
if _g.result is not None:
continue
_g.sys.stdin = _g.StringIO(_g.testInput)
_g.sys.stdout = _g.StringIO()
_g.result = _g_safeexec(_g.userCode, 'usercode', True)
if _g.result is not None:
continue
_g.result = _g_safeexec(_g.postCode, 'postcode')
if _g.result is not None:
continue
_g.userOutput = _g.sys.stdout.getvalue()
# now execute solution answer to determine desired output
if _g.canSave:
_g.result = _g_safeexec(_g.restoreCode, 'vars')
if _g.result is not None:
continue
_g.sys.stdin = _g.StringIO(_g.testInput)
_g.sys.stdout = _g.StringIO()
_g.result = _g_safeexec(_g.solutionCode, 'solution')
if _g.result is not None:
continue
_g.result = _g_safeexec(_g.postCode, 'postcode')
if _g.result is not None:
continue
_g.solutionOutput = _g.sys.stdout.getvalue()
_g.thisMatch = _g.solutionOutput == _g.userOutput
if not _g.thisMatch:
solnLines = _g.solutionOutput.splitlines()
userLines = _g.userOutput.splitlines()
for i in range(min(len(solnLines), len(userLines))):
if solnLines[i] != userLines[i]:
_g.mismatchError = ('First mismatch on line {0}'
.format(i + 1))
break
else:
if len(solnLines) > len(userLines):
_g.mismatchError = ('Output is missing lines at end')
elif len(userLines) > len(solnLines):
_g.mismatchError = ('Output has extra lines at end')
else:
_g.thisMatch = True
del solnLines, userLines
if _g.thisMatch:
_g.numCorrect += 1
_g.result = { 'verdict': 3, 'test': _g.testInput,
'result': _g.userOutput, 'solution': _g.solutionOutput }
else:
_g.result = { 'verdict': 2, 'test': _g.testInput,
'result': _g.userOutput, 'solution': _g.solutionOutput,
'message': _g.mismatchError }
finally:
_g.tests.append(_g.result)
_g.allVerdict = min(_g.allVerdict, _g.result['verdict'])
testIter = _g.testIter
_g.json.dump({ 'ok': True, 'verdict': _g.allVerdict,
'correct': _g.numCorrect, 'tests': _g.tests },
_g.stdout)
| mit | -6,534,231,385,856,816,000 | 37.133333 | 75 | 0.577098 | false | 3.507051 | true | false | false |
vicnet/weboob | modules/genericnewspaper/pages.py | 2 | 4881 | # -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.browser.pages import HTMLPage
from weboob.browser.filters.html import XPath, XPathNotFound
from weboob.browser.filters.standard import CleanText
from lxml.etree import Comment
class Article(object):
author = u''
title = u''
def __init__(self, browser, _id):
self.browser = browser
self.id = _id
self.body = u''
self.url = u''
self.date = None
class GenericNewsPage(HTMLPage):
__element_body = NotImplementedError
__article = Article
element_title_selector = NotImplementedError
main_div = NotImplementedError
element_body_selector = NotImplementedError
element_author_selector = NotImplementedError
_selector = XPath
def on_load(self):
self.handle_refresh()
self.on_loaded()
def on_loaded(self):
pass
def get_body(self):
try:
return CleanText('.')(self.get_element_body())
except (AttributeError):
return self.__article.body
def get_author(self):
try:
return CleanText('.')(self.get_element_author())
except (AttributeError):
return self.__article.author
def get_title(self):
try:
return CleanText(self._selector(self.element_title_selector))(self.main_div)
except AttributeError:
if self.main_div is None:
raise XPathNotFound("main_div is none on %s" % (self.browser))
elif self.element_title_selector != 'h1':
self.element_title_selector = 'h1'
return self.get_title()
else:
raise AttributeError("no title on %s" % (self.browser))
def get_element_body(self):
try:
return self._selector(self.element_body_selector)(self.main_div)[0]
except (AttributeError, IndexError):
if self.main_div is None:
raise XPathNotFound("main_div is none on %s" % (self.browser))
else:
raise AttributeError("no body on %s" % (self.browser))
def get_element_author(self):
try:
return self._selector(self.element_author_selector)(self.main_div)[0]
except IndexError:
if self.main_div is None:
raise XPathNotFound("main_div is none on %s" % (self.browser))
else:
raise AttributeError("no author on %s" % (self.browser))
def get_article(self, _id):
__article = Article(self.browser, _id)
__article.author = self.get_author()
__article.title = self.get_title()
__article.url = self.url
__article.body = self.get_body()
return __article
def drop_comments(self, base_element):
for comment in base_element.getiterator(Comment):
comment.drop_tree()
def try_remove(self, base_element, selector):
for el in self._selector(selector)(base_element):
try:
el.getparent().remove(el)
except (AttributeError, ValueError):
continue
def remove_from_selector_list(self, base_element, selector_list):
for selector in selector_list:
base_element.remove(self._selector(selector)(base_element))
def try_remove_from_selector_list(self, base_element, selector_list):
for selector in selector_list:
self.try_remove(base_element, selector)
def try_drop_tree(self, base_element, selector):
for el in self._selector(selector)(base_element):
el.drop_tree()
@staticmethod
def clean_relativ_urls(base_element, domain):
for a in base_element.findall('.//a'):
if "href" in a.attrib:
if a.attrib["href"] and a.attrib["href"][0:7] != "http://" and a.attrib["href"][0:7] != "https://":
a.attrib["href"] = domain + a.attrib["href"]
for img in base_element.findall('.//img'):
if img.attrib["src"][0:7] != "http://" and img.attrib["src"][0:7] != "https://":
img.attrib["src"] = domain + img.attrib["src"]
| lgpl-3.0 | 7,036,844,833,147,861,000 | 34.889706 | 115 | 0.612579 | false | 4.037221 | false | false | false |
bccp/nbodykit | nbodykit/tests/test_transform.py | 1 | 7209 | from runtests.mpi import MPITest
from nbodykit.lab import *
from nbodykit import setup_logging
from nbodykit.transform import ConstantArray
from numpy.testing import assert_allclose
import pytest
# debug logging
setup_logging("debug")
@MPITest([1, 4])
def test_sky_to_cartesian(comm):
cosmo = cosmology.Planck15
# make source
s = RandomCatalog(csize=100, seed=42, comm=comm)
# ra, dec, z
s['z'] = s.rng.normal(loc=0.5, scale=0.1)
s['ra'] = s.rng.uniform(low=110, high=260)
s['dec'] = s.rng.uniform(low=-3.6, high=60)
# make the position array
s['Position1'] = transform.SkyToCartesian(s['ra'], s['dec'], s['z'], cosmo)
# wrong name
with pytest.warns(FutureWarning):
s['Position0'] = transform.SkyToCartesion(s['ra'], s['dec'], s['z'], cosmo)
s['Position1'] = transform.SkyToCartesian(s['ra'].compute(), s['dec'], s['z'], cosmo)
@MPITest([1, 4])
def test_cartesian_to_equatorial(comm):
# make source
s = UniformCatalog(nbar=10000, BoxSize=1.0, comm=comm)
# get RA, DEC
ra, dec = transform.CartesianToEquatorial(s['Position'], observer=[0.5, 0.5, 0.5])
# check bounds
assert ((ra >= 0.)&(ra < 360.)).all().compute()
assert ((dec >= -90)&(dec < 90.)).all().compute()
ra, dec = transform.CartesianToEquatorial(s['Position'], observer=[0.5, 0.5, 0.5], frame='galactic')
# check bounds
assert ((ra >= 0.)&(ra < 360.)).all().compute()
assert ((dec >= -90)&(dec < 90.)).all().compute()
@MPITest([1, 4])
def test_cartesian_to_sky(comm):
cosmo = cosmology.Planck15
# make source
s = UniformCatalog(nbar=10000, BoxSize=1.0, seed=42, comm=comm)
# get RA, DEC, Z
ra, dec, z = transform.CartesianToSky(s['Position'], cosmo, observer=[0.5, 0.5, 0.5])
# reverse and check
pos2 = transform.SkyToCartesian(ra, dec, z, cosmo, observer=[0.5, 0.5, 0.5])
assert_allclose(s['Position'], pos2, rtol=1e-5, atol=1e-7)
_ = transform.CartesianToSky(s['Position'].compute(), cosmo)
@MPITest([1, 4])
def test_cartesian_to_sky_galactic(comm):
cosmo = cosmology.Planck15
# make source
s = UniformCatalog(nbar=10000, BoxSize=1.0, seed=42, comm=comm)
# get RA, DEC, Z
ra, dec, z = transform.CartesianToSky(s['Position'], cosmo, frame='galactic')
ra1, dec1, z1 = transform.CartesianToSky(s['Position'].compute(), cosmo, frame='galactic')
assert_allclose(ra, ra1)
assert_allclose(dec, dec1)
assert_allclose(z, z1)
# reverse and check
pos2 = transform.SkyToCartesian(ra, dec, z, cosmo, frame='galactic')
numpy.testing.assert_allclose(s['Position'], pos2, rtol=1e-5)
@MPITest([1, 4])
def test_cartesian_to_sky_velocity(comm):
cosmo = cosmology.Planck15
# make source
s = UniformCatalog(nbar=1e-5, BoxSize=1380., seed=42, comm=comm)
# real-space redshift
_, _, z_real = transform.CartesianToSky(s['Position'], cosmo,
observer=[-1e3, -1e3, -1e3])
# redshift-space redshift
_, _, z_redshift = transform.CartesianToSky(s['Position'], cosmo,
velocity=s['Velocity'],
observer=[-1e3, -1e3, -1e3])
numpy.testing.assert_allclose(z_real, z_redshift, rtol=1e-3)
# bad z max value
with pytest.raises(ValueError):
_, _, z = transform.CartesianToSky(s['Position'], cosmo, observer=[-1e4, -1e4, -1e4], zmax=0.5)
z = z.compute()
@MPITest([1, 4])
def test_stack_columns(comm):
# make source
s = RandomCatalog(csize=100, seed=42, comm=comm)
# add x,y,z
s['x'] = s.rng.uniform(0, 2600.)
s['y'] = s.rng.uniform(0, 2600.)
s['z'] = s.rng.uniform(0, 2600.)
# stack
s['Position'] = transform.StackColumns(s['x'], s['y'], s['z'])
# test equality
x, y, z = s.compute(s['x'], s['y'], s['z'])
pos = numpy.vstack([x,y,z]).T
numpy.testing.assert_array_equal(pos, s['Position'])
# requires dask array
s['Position'] = transform.StackColumns(x,y,z)
@MPITest([1, 4])
def test_halofuncs(comm):
from nbodykit.cosmology import Planck15
# make two sources
# make source
s = RandomCatalog(csize=300000, seed=42, comm=comm)
s['mass'] = s.rng.uniform() * 1e13
s['z'] = s.rng.uniform()
r = transform.HaloRadius(s['mass'], redshift=s['z'], cosmo=Planck15)
r.compute()
r = transform.HaloConcentration(s['mass'], redshift=s['z'], cosmo=Planck15)
r.compute()
r = transform.HaloVelocityDispersion(s['mass'], redshift=s['z'], cosmo=Planck15)
r.compute()
r = transform.HaloRadius(s['mass'], redshift=0, cosmo=Planck15)
r.compute()
r = transform.HaloConcentration(s['mass'], redshift=0, cosmo=Planck15)
r.compute()
r = transform.HaloVelocityDispersion(s['mass'], redshift=0, cosmo=Planck15)
r.compute()
@MPITest([1, 4])
def test_combine(comm):
# make two sources
s1 = UniformCatalog(3e-6, 2600, comm=comm)
s2 = UniformCatalog(3e-6, 2600, comm=comm)
# concatenate all columns
cat = transform.ConcatenateSources(s1, s2)
# check the size and columns
assert cat.size == s1.size + s2.size
assert set(cat.columns) == set(s1.columns)
# only one column
cat = transform.ConcatenateSources(s1, s2, columns='Position')
pos = numpy.concatenate([numpy.array(s1['Position']), numpy.array(s2['Position'])], axis=0)
numpy.testing.assert_array_equal(pos, cat['Position'])
# fail on invalid column
with pytest.raises(ValueError):
cat = transform.ConcatenateSources(s1, s2, columns='InvalidColumn')
@MPITest([1])
def test_constarray(comm):
a = ConstantArray(1.0, 1, chunks=1000)
assert len(a) == 1
assert a.shape == (1,)
a = ConstantArray([1.0, 1.0], 1, chunks=1000)
assert a.shape == (1, 2)
a = ConstantArray([1.0, 1.0], 3, chunks=1000)
assert a.shape == (3, 2)
@MPITest([1, 4])
def test_vector_projection(comm):
cosmo = cosmology.Planck15
# make source
s = UniformCatalog(nbar=1e-5, BoxSize=1380., seed=42, comm=comm)
x = transform.VectorProjection(s['Position'], [1, 0, 0])
y = transform.VectorProjection(s['Position'], [0, 1, 0])
z = transform.VectorProjection(s['Position'], [0, 0, 1])
d = transform.VectorProjection(s['Position'], [1, 1, 1])
nx = transform.VectorProjection(s['Position'], [-2, 0, 0])
ny = transform.VectorProjection(s['Position'], [0, -2, 0])
nz = transform.VectorProjection(s['Position'], [0, 0, -2])
nd = transform.VectorProjection(s['Position'], [-2, -2, -2])
numpy.testing.assert_allclose(x, s['Position'] * [1, 0, 0], rtol=1e-3)
numpy.testing.assert_allclose(y, s['Position'] * [0, 1, 0], rtol=1e-3)
numpy.testing.assert_allclose(z, s['Position'] * [0, 0, 1], rtol=1e-3)
numpy.testing.assert_allclose(d[:, 0], s['Position'].sum(axis=-1) / 3., rtol=1e-3)
numpy.testing.assert_allclose(nx, s['Position'] * [1, 0, 0], rtol=1e-3)
numpy.testing.assert_allclose(ny, s['Position'] * [0, 1, 0], rtol=1e-3)
numpy.testing.assert_allclose(nz, s['Position'] * [0, 0, 1], rtol=1e-3)
numpy.testing.assert_allclose(nd[:, 0], s['Position'].sum(axis=-1) / 3., rtol=1e-3)
| gpl-3.0 | -7,891,218,668,070,126,000 | 32.375 | 104 | 0.619642 | false | 2.884754 | true | false | false |
sequana/sequana | sequana/utils/datatables_js.py | 1 | 13957 | # coding: utf-8
#
# This file is part of Sequana software
#
# Copyright (c) 2016 - Sequana Development Team
#
# File author(s):
# Dimitri Desvillechabrol <[email protected]>,
# <[email protected]>
#
# Distributed under the terms of the 3-clause BSD license.
# The full license is in the LICENSE file, distributed with this software.
#
# website: https://github.com/sequana/sequana
# documentation: http://sequana.readthedocs.io
#
##############################################################################
""" Utilities to create a Jquery DataTable for your HTML file.
.. autosummary::
DataTableFunction
DataTable
"""
from collections import OrderedDict
import colorlog
logger = colorlog.getLogger(__name__)
class DataTableFunction(object):
""" Class that contains Jquery DataTables function and options.
Example:
::
import pandas as pd
from sequana.utils import DataTableFunction
df = pandas.read_csv('data.csv')
datatable_js = DataTableFunction(df, 'data')
datatable_js.datatable_options = {'pageLength': 15,
'dom': 'Bfrtip',
'buttons': ['copy', 'csv']}
js = datatable_js.create_javascript_function()
html_datatables = [DataTable(df, "data_{0}".format(i), datatable_js)
for i, df in enumerate(df_list)]
Here, the datatable_options dictionary is used to fine tune the appearance
of the table.
.. note:: DataTables add a number of elements around the table to control
the table or show additional information about it. There are controlled
by the order in the document (**DOM**) defined as a string made of
letters, each of them having a precise meaning. The order of the letter
is important. For instance if **B** is first, the buttons are put before
the table. If **B** is at the end, it is shown below the table.
Here are some of the valid letters and their meaning:
- **B**: add the Buttons (copy/csv)
- **i**: add *showing 1 to N of M entries*
- **f**: add a search bar (**f** filtering)
- **r**: processing display element
- **t**: the table itself
- **p**: pagination control
Each option can be specified multiple times (with the exception of the
table itself).
.. note:: other useful options are:
- pageLength: 15
- scrollX: "true"
- paging: 15
- buttons: ['copy', 'csv']
Note that buttons can also be excel, pdf, print, ...
All options of datatable:
https://datatables.net/reference/option/
"""
def __init__(self, df, html_id, index=False):
""".. rubric:: contructor
:param df: data frame.
:param str html_id: the ID used in the HTML file.
"""
self.index = index
self._html_id = html_id
self._datatable_options = dict()
self._datatable_columns = self._set_datatable_columns(df)
@property
def html_id(self):
""" Get the html_id, which cannot be set by the user after the
instanciation of the class.
"""
return self._html_id
@property
def datatable_options(self):
""" Get, set or delete the DataTable options. Setter takes a dict as
parameter with the desired options and updates the current dictionary.
Example::
datatable = DataTableFunction("tab")
datatable.datatable_options = {'dom': 'Bfrtip',
'buttons': ['copy', 'csv']}
source: https://datatables.net/reference/option/
"""
return self._datatable_options
@datatable_options.setter
def datatable_options(self, d):
try:
d['buttons'] = self._add_export_visible(d['buttons'])
except KeyError:
pass
self._datatable_options.update(d)
def _add_export_visible(self, buttons):
""" Add option to disable the exporting of hidden columns
"""
try:
for b in buttons:
b.update({'exportOptions': {'columns': ':visible'}})
except AttributeError:
buttons = [{'extend': b, 'exportOptions': {'columns': ':visible'}}
for b in buttons]
return buttons
@datatable_options.deleter
def datatable_options(self):
self._datatable_options = dict()
@property
def datatable_columns(self):
""" Get datatable_columns dictionary. It is automatically set from the
dataframe you want to plot.
"""
return self._datatable_columns
def _set_datatable_columns(self, df):
""" Fill :attr:`DataTableFunction.datatable_columns` with header of
:param:`DataTableFunction.df`.
"""
from pandas import Series
if isinstance(df, Series):
return {}
if self.index is True:
columns = [""] + list(df.columns)
else:
columns = list(df.columns)
column_dict = OrderedDict((name, dict()) for name in columns)
return column_dict
def create_javascript_function(self):
""" Return javascript to create the DataTable.
"""
js_function = """
<script type="text/javascript">
function parseCsv_{0}(csv, id) {{
Papa.parse(csv, {{
comments: '#',
delimiter: ',',
header: true,
dynamicTyping: true,
error: function(reason) {{
console.log(reason);
}},
complete: function(results) {{
{1}
}}
}});
}};
</script>
"""
return js_function.format(self.html_id,
self._create_datatable_option())
def _create_datatable_option(self):
""" Return DataTable options.
"""
self.datatable_options['columns'] = self._create_columns_option()
js = self._dict_to_string(self.datatable_options)
js = "$(id).DataTable({{{0},data: results.data}});".format(js)
return js
def _create_columns_option(self):
""" Return string well formated with all columns options.
"""
js = [self._coloption_2_str(key, value) for key, value in
self.datatable_columns.items()]
return '[{0}]'.format(',\n'.join(js))
def _coloption_2_str(self, name, options):
s = "data:'{0}'".format(name)
if options:
s = "{0},\n{1}".format(s, self._dict_to_string(options))
return '{{{0}}}'.format(s)
def _dict_to_string(self, d):
""" Convert dict to string for CanvasJS.
Example:
::
dico = {'key1': value1, 'key2': value2, 'key3': value3}
print(CanvasJS._dict_to_string(dico))
"key1:value1,key2:value2,key3:value3"
"""
s = ['{0}:{1}'.format(key, self._check_type(value)) for key, value in
d.items()]
return ',\n'.join(s)
def _check_type(self, value):
""" Check value type to fill javascript sections. String must be
surrounded by quotes and not boolean or integer.
Javascript variable must not be surrounded by quotes. Custom variables
start with 'data_'.
"""
try:
if not value.startswith(('true', 'false', 'function', '{', '[')):
return "'{0}'".format(value)
except AttributeError:
return value
return value
def set_links_to_column(self, link_col, target_col, new_page=True):
"""Hide a column with urls and connect it with a column.
:param str link_col: column with your URLs.
:param str target_col: column to connect.
"""
# hide the link column
try:
self.datatable_columns[link_col]['visible'] = 'false'
except KeyError:
keys = self.datatable_columns.keys()
logger.warning(f"KeyError: Column name '{target_col}' does not exist. Use one of {keys}")
# function to add link
if new_page is True:
fct = """function(data, type, row, meta){{
return '<a href="'+row.{0}+'" target="_blank">'+data+'</a>';
}}
""".format(link_col)
else:
fct = """function(data, type, row, meta){{
return '<a href="'+row.{0}+'">'+data+'</a>';
}}
""".format(link_col)
try:
self.datatable_columns[target_col]['render'] = fct
except KeyError:
logger.warning("KeyError: Column name '{0}' does not exist."
.format(target_col))
pass
def set_tooltips_to_column(self, tooltips_col, target_col):
"""Hide a column with tooltips and connect it with a column.
:param str tooltips_col: column with your tooltips.
:param str target_col: column to connect.
"""
# hide tooltips
try:
self.datatable_columns[tooltips_col]['visible'] = 'false'
except KeyError:
logger.warning("KeyError: Column name '{0}' does not exist."
.format(target_col))
pass
# function to add tooltips
fct = """function(data, type, row, meta){{
return '<a href="#" data-toggle="tooltip" title="'+row.{0}+'">'+data+'</a>';
}}
""".format(tooltips_col)
try:
self.datatable_columns[target_col]['render'] = fct
except KeyError:
logger.warning("KeyError: Column name '{0}' does not exist."
.format(target_col))
pass
class DataTable(object):
""" Class that contains html table which used a javascript function.
You must add in your HTML file the JS function
(:meth:`DataTable.create_javascript_function`) and the HTML code
(:meth:`DataTable.create_datatable`).
Example:
::
df = pandas.read_csv('data.csv')
datatable = DataTable(df, 'data')
datatable.datatable.datatable_options = {'pageLength': 15,
'dom': 'Bfrtip',
'buttons': ['copy', 'csv']}
js = datatable.create_javascript_function()
html = datatable.create_datatable()
# Second CSV file with same format
df2 = pandas.read_csv('data2.csv')
datatable2 = DataTable(df2, 'data2', datatable.datatable)
html2 = datatable.create_datatable()
The reason to include the JS manually is that you may include many HTML
table but need to include the JS only once.
"""
def __init__(self, df, html_id, datatable=None, index=False):
""".. rubric:: contructor
:param df: data frame.
:param str html_id: the unique ID used in the HTML file.
:param DataTableFunction datatable: javascript function to create the
Jquery Datatables. If None, a :class:`DataTableFunction` is
generated from the df.
:param bool index: indicates whether the index dataframe should
be included in the CSV table
"""
self.index = index
self._df = df
self._html_id = html_id
if datatable:
self.datatable = datatable
else:
self.datatable = DataTableFunction(df, html_id, index=index)
def __len__(self):
return len(self.df)
@property
def df(self):
return self._df
@property
def html_id(self):
return self._html_id
def create_datatable(self, style="width:100%", **kwargs):
""" Return string well formated to include in a HTML page.
:param str style: CSS option of your table.
:param **dict kwargs: parameters of :meth:`pandas.DataFrame.to_csv`.
"""
html = """
<script type="text/javascript">
$(document).ready(function() {{
var {0} = document.getElementById('csv_{0}').innerText;
parseCsv_{1}({0}, '#table_{0}');
{0} = null;
}});
</script>
""".format(self.html_id, self.datatable.html_id)
html += self._create_hidden_csv(**kwargs)
html += self._create_html_table(style)
return html
def _create_hidden_csv(self, **kwargs):
""" Return the HTML code and the CSV code for your hidden CSV section.
:param **dict kwargs: parameters of :meth:`pandas.DataFrame.to_csv`.
"""
csv = self._df.to_csv(index=self.index, **kwargs)
html = '<pre id="csv_{0}">{1}</pre>'.format(self.html_id, csv.strip())
css = '<style>#csv_{0}{{display:none}}</style>'.format(self.html_id)
return '{0}\n{1}\n'.format(css, html)
def _create_html_table(self, style):
""" Just for set some option and header.
:param str style: css option of your table.
"""
# set table id
if style:
style = 'style="{0}"'.format(style)
html_table = (
'<table id="table_{0}" class="display table text-center" {1}>'
.format(self.html_id, style)
)
# create table's header
th = '<th>{0}</th>'
if self.index is True:
header = [th.format("")]
header += [th.format(name) for name in self.df]
else:
header = [th.format(name) for name in self.df]
header = '<thead><tr>{0}</tr></thead>'.format("\n".join(header))
html_table = """
{0}
{1}
</table>
""".format(html_table, header)
return html_table
def create_javascript_function(self):
""" Generate the javascript function to create the DataTable in a HTML
page.
"""
return self.datatable.create_javascript_function()
| bsd-3-clause | 7,109,333,823,725,242,000 | 32.550481 | 101 | 0.556996 | false | 4.044335 | false | false | false |
c3nav/c3nav | src/c3nav/mapdata/migrations/0019_location_group_category.py | 1 | 2278 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-10 11:39
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
def create_location_group_category(apps, schema_editor):
LocationGroupCategory = apps.get_model('mapdata', 'LocationGroupCategory')
category = LocationGroupCategory.objects.create(name='groups', titles={
'en': 'Location Groups',
'de': 'Ortgruppen',
})
LocationGroup = apps.get_model('mapdata', 'LocationGroup')
LocationGroup.objects.update(category=category)
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0018_auto_20170708_1752'),
]
operations = [
migrations.CreateModel(
name='LocationGroupCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titles', c3nav.mapdata.fields.JSONField(default={})),
('name', models.SlugField(unique=True, verbose_name='Name')),
],
options={
'verbose_name': 'Location Group Category',
'verbose_name_plural': 'Location Group Categories',
'default_related_name': 'locationgroupcategories',
},
),
migrations.AlterModelOptions(
name='locationslug',
options={'verbose_name': 'Location with Slug', 'verbose_name_plural': 'Location with Slug'},
),
migrations.AddField(
model_name='locationgroup',
name='category',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='groups', to='mapdata.LocationGroupCategory', verbose_name='Location Group Category'),
),
migrations.RunPython(create_location_group_category, migrations.RunPython.noop),
migrations.AlterField(
model_name='locationgroup',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='groups',
to='mapdata.LocationGroupCategory', verbose_name='Location Group Category'),
),
]
| apache-2.0 | 7,975,349,540,934,128,000 | 39.678571 | 191 | 0.623354 | false | 4.298113 | false | false | false |
allista/GetIsolationSources | setup.py | 1 | 2142 | #!/usr/bin/python
# coding=utf-8
#
# Copyright (C) 2012 Allis Tauri <[email protected]>
#
# indicator_gddccontrol is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# indicator_gddccontrol is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Created on Oct 14, 2014
@author: Allis Tauri <[email protected]>
"""
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def lines(fname):
return list(l.strip() for l in open(os.path.join(os.path.dirname(__file__), fname)))
setup(name='GetIsolationSources',
version='1.5.2',
description='Retrieves isolation sources from NCBI given the set of sequences with '
'specified accession numbers. Both nucleotide and protein accessions are accepted.',
long_description=read('README.md'),
license='MIT',
author='Allis Tauri',
author_email='[email protected]',
url='https://github.com/allista/GetIsolationSources',
keywords=['bioinformatics', 'ncbi', 'entrez'],
classifiers=[
'Development Status :: 4 - Beta',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Intended Audience :: Science/Research',
'Operating System :: POSIX',
'Programming Language :: Python'],
packages=[],
scripts=['get_isolation_sources'],
install_requires=lines('requirements.txt'),
)
| mit | -1,124,134,146,727,466,500 | 34.7 | 90 | 0.696078 | false | 3.712305 | false | false | false |
dev-platypus/marche | marche/daemon.py | 1 | 6197 | # -*- coding: utf-8 -*-
# *****************************************************************************
# Marche - A server control daemon
# Copyright (c) 2015-2016 by the authors, see LICENSE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Georg Brandl <[email protected]>
# Alexander Lenz <[email protected]>
#
# *****************************************************************************
from __future__ import print_function
import os
import sys
import time
import signal
import logging
import argparse
from os import path
from marche import __version__
from marche.config import Config
from marche.utils import daemonize, setuser, write_pidfile, remove_pidfile, \
get_default_cfgdir
from marche.loggers import ColoredConsoleHandler, LogfileHandler
from marche.handler import JobHandler
from marche.auth import AuthHandler
from marche.colors import nocolor
# configure logging library: we don't need process/thread ids and callers
logging.logMultiprocessing = False
logging.logProcesses = False
logging.logThreads = False
logging._srcfile = None # pylint: disable=protected-access
class Daemon(object):
def __init__(self):
self.stop = False
self.log = logging.getLogger('marche')
if os.name == 'nt': # pragma: no cover
nocolor()
def parse_args(self, args):
rootdir = path.join(path.dirname(__file__), '..')
if path.exists(path.join(rootdir, '.git')):
default_cfgdir = path.abspath(path.join(rootdir, 'etc'))
else: # pragma: no cover
default_cfgdir = get_default_cfgdir()
parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version',
version='Marche daemon version %s' % __version__)
parser.add_argument('-c', dest='configdir', action='store',
default=default_cfgdir, help='configuration '
'directory (default %s)' % default_cfgdir)
parser.add_argument('-d', dest='daemonize', action='store_true',
help='daemonize the process')
parser.add_argument('-v', dest='verbose', action='store_true',
help='verbose (debug) output')
return parser.parse_args(args)
def apply_config(self):
self.config = Config(self.args.configdir)
if self.args.daemonize: # pragma: no cover
daemonize(self.config.user, self.config.group)
else:
setuser(self.config.user, self.config.group)
self.log.setLevel(logging.DEBUG if self.args.verbose else logging.INFO)
del self.log.handlers[:]
if not self.args.daemonize:
self.log.addHandler(ColoredConsoleHandler())
try:
self.log.addHandler(LogfileHandler(self.config.logdir, 'marche'))
except Exception as err: # pragma: no cover
if self.args.daemonize:
print('cannot open logfile:', err, file=sys.stderr)
else:
self.log.exception('cannot open logfile: %s', err)
return False
if not self.config.interfaces:
self.log.error('no interfaces configured, the daemon will not do '
'anything useful!')
return False
if not self.config.job_config:
self.log.error('no jobs configured, the daemon will not do '
'anything useful!')
return False
if not self.config.auth_config:
self.log.warning('no authenticators configured, everyone will be '
'able to execute any action!')
if self.args.daemonize: # pragma: no cover
write_pidfile(self.config.piddir)
return True
def run(self, args=None):
self.args = self.parse_args(args)
if not self.apply_config():
return 1
self.log.info('Starting marche %s ...', __version__)
jobhandler = JobHandler(self.config, self.log)
authhandler = AuthHandler(self.config, self.log)
for interface in self.config.interfaces:
try:
mod = __import__('marche.iface.%s' % interface, {}, {},
['Interface'])
except Exception as err:
self.log.exception('could not import interface %r: %s',
interface, err)
continue
self.log.info('starting interface: %s', interface)
try:
iface = mod.Interface(self.config, jobhandler, authhandler,
self.log)
if iface.needs_events:
jobhandler.add_interface(iface)
iface.run()
except Exception as err:
self.log.exception('could not start interface %r: %s',
interface, err)
continue
signal.signal(signal.SIGTERM, lambda *a: setattr(self, 'stop', True))
signal.signal(signal.SIGUSR1, lambda *a: jobhandler.trigger_reload())
self.log.info('startup successful')
self.wait()
jobhandler.shutdown()
if self.args.daemonize: # pragma: no cover
remove_pidfile(self.config.piddir)
return 0
def wait(self): # pragma: no cover
try:
while not self.stop:
time.sleep(1)
except KeyboardInterrupt:
pass
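# A minimal entry-point sketch (an illustrative addition; the packaged marche
# daemon may install its own console-script wrapper instead):
if __name__ == '__main__':  # pragma: no cover
    sys.exit(Daemon().run())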
| gpl-2.0 | -4,068,841,360,695,524,400 | 36.557576 | 79 | 0.586736 | false | 4.336599 | true | false | false |
popcorn9499/chatBot | main.py | 1 | 7120 |
from concurrent.futures import ThreadPoolExecutor #async to sync
from modules import variables
from modules import mainBot
from modules import fileIO
from modules import youtube
from modules import discordBot
#used for the main program
import threading
import time #needed for time.sleep() in deleteIrcToDiscordMsgThread below
import sys, os
#discord stuff imported
import discord #gets the discord and asyncio libraries
import asyncio
##broken
#seems to not like my code in the discordCheckMsg function
##it's the part where it sets the value to the delete code.
#this then causes the delete thread to crash trying to find the shift value to go by
##problems
#unsure what will happen in a headless environment if the OAuth hasn't been set
##if you input an invalid token the first time, it will continue to say the token is invalid even for tokens that are valid
####variables
from modules import irc
#used as global variables; defined before we start using them to avoid problems down the road
##Jaden's shift code
#delete code is: 98reghwkjfgh8932guicdsb98r3280yioufsdgcgbf98
def shift(value, messages):
if value == 0:
return messages
messagesTemp = [] #Assign temp list
for i in messages: #For every message
messagesTemp += [i,] #Add to temp list
messages.clear() #Delete old list
for i in messagesTemp[value:]: #For all values after last delete code
messages += [i,] #Take value from temp list and put in new spot
messagesTemp.clear() #Delete temp list
return messages
def checkDeleteCode(messages):
i = 0 #Set i to 0
#print("{0} : {1}".format(haltDeleteMSG,haltDiscordMSG)) #debug that isnt really nessisary if this code isnt used.
while(messages[i] == "98reghwkjfgh8932guicdsb98r3280yioufsdgcgbf98"): #While value at index is the delete code
i += 1 #Add 1 to i
return i #Return value of i when message is not delete code
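#A small worked example (hypothetical message list) of how checkDeleteCode() and shift() compose:
#   msgs = [deleteCode, deleteCode, "hello"]
#   checkDeleteCode(msgs) -> 2   (two leading delete-code markers)
#   shift(2, msgs)        -> ["hello"]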
def deleteIrcToDiscordMsgThread():
global discordMSG, haltDeleteMSG, haltDiscordMSG
while True:
#print(discordMSG)
#print("{0} : {1}".format(haltDeleteMSG,haltDiscordMSG))
if haltDeleteMSG == 0:
haltDiscordMSG = 1
#shiftValue = checkDeleteCode(discordMSG)
#discordMSG = shift(shiftValue, discordMSG)
haltDiscordMSG = 0
#print(discordMSG)
time.sleep(4)
#this code is old and unnecessary at the minute; it must be rewritten.
# first run stuff
# def getToken(): #gets the token
# global config
# realToken = "false" #this is just for the while loop
# while realToken == "false":
# config["discordToken"] = input("Discord Token: ") #gets the user input
# try:
#         client.run(config["discordToken"]) #attempts to run it; if it fails, execute the next bit of code, otherwise save it and go on
# except:
# print("Please enter a valid token")
#             sys.exit(0) #this is a workaround for the bug that causes the code to keep treating the discord token as invalid, even when it is valid, after the first invalid entry
# else:
# realToken = "true"
# async def getFirstRunInfo():
# global config
# print('Logged in as') ##these things could be changed a little bit here
# print(client.user.name)
# print(client.user.id)
# while config["serverName"] == "":
#         for server in client.servers: #this sifts through all the bot's servers and gets the server we want
# print(server.name)
# if input("If this is the server you want type yes if not hit enter: ") == "yes":
# config["serverName"] = server.name
# break
# while config["channelName"] == "":
#         for server in client.servers: #this sifts through all the bot's servers and gets the channel we want
# # should probably add a check for the server in here, I'm guessing
# # print(server.name)
# for channel in server.channels:
# if str(channel.type) == "text":
# print(channel.name)
# if input("If this is the channel you want type yes if not hit enter: ") == "yes":
# config["channelName"] #starts the discord bot= channel.name
# break
# while config["IRCToDiscordFormatting"] == "": #fills the youtube to discord formating
# config["IRCToDiscordFormatting"] = input("""Please enter the chat formatting for chat coming from irc to go to discord.
# {1} is the placeholder for the username
# {2} is the placeholder for the message
# Ex. "{0} : {1}: """)
# while config["discordToIRCformating"] == "": #fills the discord to youtube formating
# config["discordToIRCFormating"] = input("""Please enter the chat formatting for chat coming from discord to go to irc.
# {0} is the placeholder for the username
# {1} is the placeholder for the message
# Ex. "{0} : {1}": """)
# print("Configuration complete")
# fileSave("config-test.json",config) #saves the file
# print("Please run the command normally to run the bot")
# await client.close()
# if os.path.isfile("config-test.json") == False:#checks if the file exists and if it doesn't then we go to creating it
# print("Config missing. This may mean this is your first time setting this up")
# firstRun = "on"
# else:
# config = fileLoad("config-test.json") #if it exists try to load it
# if firstRun == "on":
# config = {"channelName": "", "pageToken": "", "serverName": "", "discordToken": "","discordToIRCFormating": "", "IRCToDiscordFormatting":""}
# getToken()
variables.config = fileIO.fileLoad("config-test.json")
#this starts everything for the irc client
##main loop for the code
#deleteThread = threading.Thread(target=deleteIrcToDiscordMsgThread) #this is broken and needs rewriting
#deleteThread.start()
#mainBot.mainBot().main()
print("test")
chatControlThread = threading.Thread(target=mainBot.mainBot().main)
chatControlThread.start()
ircCheckThread = threading.Thread(target=irc.ircCheck)#starts my irc check thread which should print false if the irc thread dies.
if variables.config["Bot"]["IRC"]["Enabled"] == True:
ircCheckThread.start()
print("IRC Loaded")
else:
print("IRC not loaded")
youtubeChatThread = threading.Thread(target=youtube.youtubeChatControl)#starts my youtube chat thread
if variables.config["Bot"]["Youtube"]["Enabled"] == True:
youtubeChatThread.start()
print("Youtube Loaded")
else:
print("Youtube not loaded")
#discordThread = threading.Thread(target=discordBot.client.run(variables.config["Bot"]["Discord"]["Token"]))#creates the thread for the discord bot
discordThread = threading.Thread(target=discordBot.start, args=(variables.config["Bot"]["Discord"]["Token"],))#creates the thread for the discord bot; pass the callable and args so start() is not invoked immediately
if variables.config["Bot"]["Discord"]["Enabled"] == True:
print("Discord Loaded")
discordThread.start()
else:
print("Discord not loaded")
# twitchBot().getViewerCount()
# print("ye")
| gpl-3.0 | -9,078,400,756,142,802,000 | 36.277487 | 174 | 0.676545 | false | 3.7812 | true | false | false |
bsodhi/flipbrain | gae_app/flipbrain/trails.py | 1 | 10373 | """
Copyright 2017 Balwinder Sodhi
Licenced under MIT Licence as available here:
https://opensource.org/licenses/MIT
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Created on Mar 3, 2017
@author: Balwinder Sodhi
"""
import urllib
import urllib2
from common import *
from entities import *
class YouTubeUtils:
"""
Utility class for handling youtube API calls.
"""
def __init__(self, dev_key):
self.YT_URL = "https://www.googleapis.com/youtube/v3/"
self.DEVELOPER_KEY = dev_key
def get_playlist_items(self, pl_id):
"""
Fetches the youtube playlist items information via youtube API.
:param pl_id: Playlist ID
:return: List of videos in the playlist.
"""
videos = []
try:
data = dict()
data['key'] = self.DEVELOPER_KEY
data['playlistId'] = pl_id
data['part'] = "contentDetails,snippet"
data['maxResults'] = 50
data['fields'] = "items(snippet(title,description,resourceId/videoId))"
qs = urllib.urlencode(data)
url = "{}playlistItems?{}".format(self.YT_URL, qs)
result = urllib2.urlopen(url)
res_dict = json.loads(result.read())
for item in res_dict.get("items", []):
vid = item['snippet']['resourceId']['videoId']
title = item['snippet']['title']
videos.append((vid, title))
except urllib2.URLError:
logging.exception('Caught exception fetching url')
return videos
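    # A hypothetical usage sketch (the developer key and playlist ID below are
    # placeholders, not real values):
    #   yt = YouTubeUtils('MY_DEV_KEY')
    #   for video_id, title in yt.get_playlist_items('PLxxxxxxxxxxxx'):
    #       print(video_id, title)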
def get_playlist_info(self, pl_id):
"""
Fetches the information such as title and description etc. of
the given youtube playlist.
:param pl_id: Playlist ID.
:return: Tuple (title, description)
"""
pl_info = None
try:
data = dict()
data['key'] = self.DEVELOPER_KEY
data['id'] = pl_id
data['part'] = "snippet"
data['fields'] = "items(snippet(title,description))"
qs = urllib.urlencode(data)
url = "{}playlists?{}".format(self.YT_URL, qs)
result = urllib2.urlopen(url)
res_dict = json.loads(result.read())
# Expected max one item
for item in res_dict.get("items", []):
title = item['snippet']['title']
desc = item['snippet']['description']
pl_info = (title, desc)
except urllib2.URLError:
logging.exception('Caught exception fetching url')
return pl_info
def get_video_details(self, v_id):
"""
Fetches details about a youtube video via youtube API.
:param v_id: A comma separated list of video IDs.
:return: List of video detail dictionaries.
"""
video_list = []
try:
data = dict()
data['key'] = self.DEVELOPER_KEY
data['id'] = v_id
data['part'] = "snippet"
# data['maxResults'] = 50
data['fields'] = "items(id,snippet(title,description,tags))"
qs = urllib.urlencode(data)
url = "{}videos?{}".format(self.YT_URL, qs)
logging.info(">>>>> YT URL = %s", url)
result = urllib2.urlopen(url)
res_dict = json.loads(result.read())
if "items" in res_dict:
for item in res_dict["items"]:
video = dict()
video["title"] = item["snippet"]["title"]
video["description"] = item["snippet"]["description"]
video["tags"] = ", ".join(item["snippet"]["tags"])
video["itemId"] = v_id
video["url"] = "http://youtu.be/%s" % v_id
video_list.append(video)
except urllib2.URLError:
logging.exception('Caught exception fetching url')
return video_list
class TrailHandler(BaseHandler):
"""
Handler for trail related HTTP requests.
"""
def getTrailById(self, for_view=False):
"""
        Fetch the trail given by the ``trailId`` request parameter and send it back as JSON.
        :param for_view: when True, also increment and include the trail's view count
        :return: None; the payload is written via send_json_response
"""
t = TrailDto.get_by_id(long(self.request.params["trailId"]))
td = t.to_dict_with_id('trailId')
if for_view:
tv_list = TrailViewsDto.query(TrailViewsDto.trail == t.key).fetch()
if not tv_list:
tv = TrailViewsDto(views=1, trail=t.key)
else:
tv = tv_list[0]
tv.views += 1
tv.put()
td['viewsCount'] = tv.views
td['assessments'] = []
tas = TrailAssessmentDto.query(TrailAssessmentDto.trail == t.key).fetch()
if tas:
a_list = AssessmentDto.query(
AssessmentDto.key.IN([a.assess for a in tas])).fetch()
td['assessments'] = [a.to_dict_with_id("assessId") for a in a_list]
else:
logging.info("No trail assessments found.")
self.send_json_response(Const.STATUS_OK, td)
# def addYTContent(self):
# f = json.loads(self.request.body)
# urls = f.get("url")
# vids = []
# if urls:
# yt = YouTubeUtils()
# for u in urls.split("\n"):
# c = VideoDto()
# v = yt.get_video_details(u.split("/")[-1])
# c.populate_from_dict(v)
# c.put()
# vids.append(c.to_dict_with_id("videoId"))
#
# self.send_json_response(Const.STATUS_OK, "Added videos.")
def addYTrail(self):
"""
        Create a trail from a YouTube playlist or video IDs given in the request parameters.
        :return: None; the saved trail is written via send_json_response
"""
pd = self.request.params
t = TrailDto(tags=pd['tags'])
t.resources = []
yt = YouTubeUtils(self.get_setting(Const.CFG_YOUTUBE_KEY))
if 'isPL' in pd and pd['isPL']:
pl_info = yt.get_playlist_info(pd['resource'])
if not pl_info:
raise ValueError("Playlist not found!")
t.title = pl_info[0]
p = yt.get_playlist_items(pd['resource'])
for vid in p:
c = VideoDto()
c.description = vid[1]
c.title = vid[1]
c.itemId = vid[0]
c.url = "https://youtu.be/%s" % vid[0]
t.videos.append(c)
else:
vid_list = yt.get_video_details(pd['resource'])
for v in vid_list:
c = VideoDto()
c.populate_from_dict(v)
t.videos.append(c)
t.owner = self.get_current_user_key()
if 'title' in pd:
t.title = pd['title']
t.put()
self.send_json_response(Const.STATUS_OK, t.to_dict_with_id('trailId'))
def saveContent(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def deleteContent(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def saveTrail(self):
"""
        Create or update a trail from the JSON request body, replacing its assessment links.
        :return: None; the saved trail is written via send_json_response
"""
tf = json.loads(self.request.body)
t = TrailDto()
if 'trailId' in tf:
t = TrailDto.get_by_id(int(tf['trailId']))
logging.debug("Loaded trail from DB tid=%s", tf['trailId'])
t.populate_from_dict(tf)
t_key = t.put()
# Clear old trail assessments
ta_list = TrailAssessmentDto.query(TrailAssessmentDto.trail == t_key).fetch()
if ta_list:
ndb.delete_multi([x.key for x in ta_list])
# Insert newly selected assessments for trail
if 'assessments' in tf:
for ta_dict in tf['assessments']:
ta = TrailAssessmentDto()
ta.trail = t_key
ta.assess = ndb.Key(AssessmentDto, ta_dict['assessId'])
ta.put()
trl = t.to_dict_with_id("trailId")
trl['assessments'] = tf['assessments']
logging.debug("Saved trail to DB tid=%s", t_key.id())
self.send_json_response(Const.STATUS_OK, trl)
def deleteTrail(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def getTrailForView(self):
"""
        :return: None; delegates to getTrailById with view counting enabled
"""
self.getTrailById(for_view=True)
def searchComments(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def addSubs(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def getComments(self):
"""
        Fetch all comments for the trail item given by the ``iid`` request parameter.
        :return: None; the comment list is written via send_json_response
"""
v_id = self.request.params["iid"]
qry = CommentDto.query(CommentDto.trailItemId == str(v_id))
cl = qry.fetch()
self.send_json_response(Const.STATUS_OK, [c.to_dict_with_id("commentId") for c in cl])
def deleteComment(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def saveComment(self):
"""
        Create or update a comment from the JSON request body; only the owner may update it.
        :return: None; the saved comment is written via send_json_response
"""
frm = json.loads(self.request.body)
if "commentId" in frm:
c = CommentDto.get_by_id(long(frm["commentId"]))
if c:
if c.owner != self.get_current_user_key():
raise ValueError("Cannot save comment not owned by current user.")
c.populate_from_dict(frm)
c.put()
result = c.to_dict_with_id("commentId")
else:
c = CommentDto(owner=self.get_current_user_key())
c.populate_from_dict(frm)
u = self.get_current_user()
c.author = "%s %s" % (u['firstName'], u['lastName'])
c.put()
result = c.to_dict_with_id("commentId")
self.send_json_response(Const.STATUS_OK, result)
def getSubsForUser(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def getContentsForUser(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
def search(self):
self.send_json_response(Const.STATUS_ERROR, "Not supported.")
| mit | 7,271,796,624,487,278,000 | 31.114551 | 94 | 0.539863 | false | 3.739366 | false | false | false |
esboych/shaker | shaker/engine/messaging.py | 1 | 1734 | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
import zmq
from shaker.engine import utils
LOG = logging.getLogger(__name__)
class MessageQueue(object):
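    """A ZeroMQ REP server that yields (message, reply_handler) pairs.
    Iterating over an instance blocks for incoming JSON requests; call the
    yielded reply handler once per message to send the JSON reply.
    """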
def __init__(self, endpoint):
_, port = utils.split_address(endpoint)
context = zmq.Context()
self.socket = context.socket(zmq.REP)
self.socket.bind("tcp://*:%s" % port)
LOG.info('Listening on *:%s', port)
def __iter__(self):
try:
while True:
# Wait for next request from client
message = self.socket.recv_json()
LOG.debug('Received request: %s', message)
def reply_handler(reply_message):
self.socket.send_json(reply_message)
LOG.debug('Sent reply: %s', reply_message)
try:
yield message, reply_handler
except GeneratorExit:
break
except BaseException as e:
if isinstance(e, KeyboardInterrupt): # SIGINT is ok
LOG.info('Process is interrupted')
else:
LOG.exception(e)
raise
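# A minimal usage sketch (hypothetical endpoint; only the port part is used):
#   queue = MessageQueue('tcp://127.0.0.1:5555')
#   for message, reply in queue:
#       reply({'ok': True})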
| apache-2.0 | -6,983,363,085,708,255,000 | 30.527273 | 69 | 0.603806 | false | 4.345865 | false | false | false |
kingfisher1337/tns | ising_ctm/plot.py | 1 | 2059 | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import os
mpl.rcParams["text.usetex"] = True
mpl.rcParams["text.latex.preamble"] = "\usepackage{bm}"
for filename in sorted(os.listdir("output")):
if filename.startswith("sigmax_sigmaz_T_"):
print filename
T = []
mx = []
mz = []
err = []
with open("output/" + filename, "r") as f:
for line in f:
fields = line.split(" ")
T.append(float(fields[0]))
mx.append(float(fields[1]))
mz.append(float(fields[2]))
if len(fields) > 3:
err.append(float(fields[3]))
if len(err) > 0:
plt.errorbar(T, mx, marker="x", label="$\\langle \\sigma_x \\rangle_{\\infty}$", yerr=err)
plt.errorbar(T, mz, marker="x", label="$\\langle \\sigma_z \\rangle_{\\infty}$", yerr=err)
else:
plt.plot(T, mx, marker="x", label="$\\langle \\sigma_x \\rangle_{\\infty}$")
plt.plot(T, mz, marker="x", label="$\\langle \\sigma_z \\rangle_{\\infty}$")
plt.legend(loc=1)
plt.xlabel("$T$")
plt.ylim(0,1)
plt.grid(True)
plt.savefig("plots/" + filename.split(".dat")[0] + ".png")
plt.close()
elif filename.startswith("sigmazsigmaz_T_"):
print filename
T = []
zz = []
err = []
with open("output/" + filename, "r") as f:
for line in f:
fields = line.split(" ")
T.append(float(fields[0]))
zz.append(float(fields[1]))
if len(fields) > 2:
err.append(float(fields[2]))
if len(err) > 0:
plt.errorbar(T, zz, marker="x", yerr=err)
else:
plt.plot(T, zz, marker="x")
plt.xlabel("$T$")
plt.ylabel("$\\langle \\sigma_z^{(j)} \\sigma_z^{(j+1)} \\rangle$")
plt.grid(True)
plt.savefig("plots/" + filename.split(".dat")[0] + ".png")
plt.close()
| gpl-3.0 | 7,543,500,244,433,566,000 | 35.122807 | 102 | 0.48713 | false | 3.364379 | false | false | false |
arizona-phonological-imaging-lab/Autotrace | matlab-version/fixImages.py | 3 | 2423 | import os, sys, subprocess
import Image
import pygtk
pygtk.require('2.0')
import gtk, gobject
import gtk.glade
class ImageFixer:
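    """Checks a batch of images and, after user confirmation, crops and
    resizes them in place to 720x480, reporting progress in a dialog."""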
def __init__(self, filenames):
self.gladefile = "LinguaViewer.glade"
self.wTree = gtk.glade.XML(self.gladefile, "resize")
self.window = self.wTree.get_widget("resize")
self.window.set_size_request(400, 100)
self.window.connect("destroy", self.destroy_progress)
self.pbar = self.wTree.get_widget("progressbar1")
self.pbar.show()
self.val = 0.0
self.frac = 1.0/len(filenames)
self.pbar.set_fraction(self.val)
result = self.check(filenames)
if result == gtk.RESPONSE_OK:
task = self.fix(filenames)
gobject.idle_add(task.next)
else:
self.window.destroy()
def check(self, filenames):
#check whether we need to do correction
badcount = 0
for i in filenames:
im = Image.open(i)
if (im.size[0] != 720): #or (im.size[1] != 480):
badcount += 1
break
if badcount > 0:
dlg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING,
gtk.BUTTONS_OK_CANCEL,
"It appears that 1 or more images need to be resized.\nResizing the images will overwrite the originals. Continue?")
result = dlg.run()
dlg.destroy()
else:
result = gtk.RESPONSE_CANCEL
return result
def fix(self, files):
l = len(files)
c = 0
for j in files:
im = Image.open(j)
if (im.size[0] != 720) or (im.size[1] != 480):
cmd = ['convert', j, '-shave', '126x0', j]
p = subprocess.Popen(cmd)
p.wait()
cmd = ['convert', j, '-chop', '12x0', j]
p = subprocess.Popen(cmd)
p.wait()
cmd = ['convert', j, '-resize', '720x480!', j]
p = subprocess.Popen(cmd)
#p.wait()
self.val += self.frac
self.pbar.set_fraction(self.val)
c += 1
if c < l:
yield True
else:
yield False
def destroy_progress(self, event):
self.window.destroy()
| mit | -5,566,361,319,025,989,000 | 29.670886 | 132 | 0.496079 | false | 3.864434 | false | false | false |
migasfree/migasfree | migasfree/server/models/status_log.py | 1 | 1086 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .computer import Computer
from .event import Event
class DomainStatusLogManager(models.Manager):
def scope(self, user):
qs = super(DomainStatusLogManager, self).get_queryset()
if not user.is_view_all():
qs = qs.filter(computer_id__in=user.get_computers())
return qs
class StatusLogManager(DomainStatusLogManager):
def create(self, computer):
obj = StatusLog()
obj.computer = computer
obj.status = computer.status
obj.save()
return obj
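# A brief usage sketch (hypothetical Computer instance): log a machine's
# current status with StatusLog.objects.create(computer).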
class StatusLog(Event):
status = models.CharField(
verbose_name=_('status'),
max_length=20,
null=False,
choices=Computer.STATUS_CHOICES,
default='intended'
)
objects = StatusLogManager()
class Meta:
app_label = 'server'
verbose_name = _("Status Log")
verbose_name_plural = _("Status Logs")
permissions = (("can_save_statuslog", "Can save Status Log"),)
| gpl-3.0 | -8,389,321,136,981,804,000 | 23.681818 | 70 | 0.626151 | false | 3.963504 | false | false | false |
danfleck/Class-Chord | network-client/src/gmu/chord/FingerEntry.py | 1 | 1208 | ''' Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Created on Feb 24, 2014
@author: dfleck
'''
import math
class FingerEntry:
'''Represents an entry in the finger table.
Note: Finger indexes go from 0-->m-1 which is different than the
Chord paper which goes from 1-->m
'''
m = 128 # Number of bits in entry set
def __init__(self, k, n, nodeLocation):
'''k is the finger table entry.
n is the node ID of the node holding this entry
'''
#print("DEBUG: fingerINIT: %d %d " % (k-1,n))
twoToTheM = math.pow(2, FingerEntry.m)
        self.start = (n + math.pow(2, k-1)) % twoToTheM  # parenthesized: the modulo must wrap the whole sum
        self.intervalStart = self.start
        self.intervalEnd = (n + math.pow(2, k)) % twoToTheM
self.nodeLocation = nodeLocation # This is the succ on the tables in the Chord paper
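        # Worked example (hypothetical small ring for illustration): with
        # m = 3, n = 1 and k = 1, start = (1 + 2**0) % 8 = 2, interval [2, 3);
        # with k = 3, start = (1 + 2**2) % 8 = 5, interval [5, 1) (wrapping).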
def __str__(self):
if self.nodeLocation is None:
nodeId = -999
else:
nodeId = self.nodeLocation.id
return "Start:%d End:%d NodeLocation:%d" % (self.start, self.intervalEnd, nodeId) | apache-2.0 | 593,880,393,867,182,500 | 30.815789 | 112 | 0.580298 | false | 3.682927 | false | false | false |
LearningRegistry/LearningRegistry | LR/lr/util/validator.py | 2 | 5077 | # Copyright 2011 SRI International
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lxml import etree
from pylons import config
import logging, os, re, subprocess
_log = logging.getLogger(__name__)
namespaces = {
"oai" : "http://www.openarchives.org/OAI/2.0/",
"lr" : "http://www.learningregistry.org/OAI/2.0/",
"oai_dc" : "http://www.openarchives.org/OAI/2.0/oai_dc/",
"oai_lr" : "http://www.learningregistry.org/OAI/2.0/oai_dc/",
"dc":"http://purl.org/dc/elements/1.1/",
"dct":"http://purl.org/dc/terms/",
"nsdl_dc":"http://ns.nsdl.org/nsdl_dc_v1.02/",
"ieee":"http://www.ieee.org/xsd/LOMv1p0",
"xsi":"http://www.w3.org/2001/XMLSchema-instance"
}
class XercesValidator():
def __init__(self):
def is_exe(fpath):
return os.path.exists(fpath) and os.access(fpath, os.X_OK)
if "xerces-c.StdInParse" in config and is_exe(config["xerces-c.StdInParse"]):
self.stdinparse = [config["xerces-c.StdInParse"], '-n', '-f', '-s']
self.enabled = True
else:
self.enabled = False
def validate(self, contents=""):
errors = []
if self.enabled:
process = subprocess.Popen(self.stdinparse, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
xmlin = contents
(_, stderr) = process.communicate(input=xmlin.encode("utf8"))
if stderr != None or stderr != "":
err_lines = stderr.splitlines()
for err in err_lines:
m = re.match('''.*\s+line\s+([0-9]+),\s+char\s+([0-9]+)\):\s*(.*)$''', err)
if m is not None:
errors.append({ "line": m.group(1), 'char': m.group(2), 'msg': m.group(3) })
else:
_log.info("Xerces not available for validation.")
return errors
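# A minimal usage sketch (hypothetical XML string): validate() returns a list
# of {'line', 'char', 'msg'} dicts, which is empty when the document is valid
# or when the StdInParse binary is not configured.
#   errors = XercesValidator().validate(u'<document/>')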
_validator = XercesValidator()
def validate_xml_content_type(res):
content_type = None
try:
content_type = res.headers['Content-Type']
except:
try:
content_type = res.headers['content-type']
except:
pass
assert re.match("""text/xml;\s*charset=utf-8""", content_type) != None , '''Expected Content Type: "text/xml; charset=utf-8" Got: "%s"''' % content_type
def validate_json_content_type(res):
content_type = None
try:
content_type = res.headers['Content-Type']
except:
try:
content_type = res.headers['content-type']
except:
pass
assert re.match("""application/json;\s*charset=utf-8""", content_type) != None , '''Expected Content Type: "application/json; charset=utf-8" Got: "%s"''' % content_type
def parse_response(response):
body = response.body
xmlcontent = etree.fromstring(body)
return { "raw": body, "etree": xmlcontent }
def validate_lr_oai_etree(xmlcontent, errorExists=False, checkSchema=False, errorCodeExpected=None):
error = xmlcontent.xpath("//*[local-name()='error']", namespaces=namespaces)
if errorExists == False:
if len(error) > 0:
assert 0 == len(error), "validate_lr_oai_etree FAIL: Error code:{0} mesg:{1}".format(error[0].xpath("@code", namespaces=namespaces)[0], error[0].xpath("text()", namespaces=namespaces)[0])
elif errorExists and errorCodeExpected != None:
codeReceived = error[0].xpath("@code", namespaces=namespaces)[0]
if errorCodeExpected != codeReceived:
assert 0 == len(error), "validate_lr_oai_etree FAIL: Expected:{2}, Got Error code:{0} mesg:{1}".format(error[0].xpath("@code", namespaces=namespaces)[0], error[0].xpath("text()", namespaces=namespaces)[0], errorCodeExpected)
else:
assert 1 == len(error), "validate_lr_oai_etree FAIL: Expected error, none found."
def validate_lr_oai_response( response, errorExists=False, checkSchema=False, errorCodeExpected=None):
validate_xml_content_type(response)
obj = parse_response(response)
xmlcontent = obj["etree"]
validate_lr_oai_etree(xmlcontent, errorExists, checkSchema, errorCodeExpected)
schemaErrors = _validator.validate(obj["raw"])
assert len(schemaErrors) == 0, "validate_lr_oai_response: Schema validation error:\n%s" % '\n'.join(map(lambda x: "\t(line: {0}, char: {1}): {2}".format(x["line"], x["char"], x["msg"]), schemaErrors))
| apache-2.0 | -7,235,573,686,780,789,000 | 41.316667 | 236 | 0.603506 | false | 3.663059 | false | false | false |
ihor-pyvovarnyk/oae-sound-processing-tool | app/ffmpeg/kernel.py | 1 | 1782 | import functools
from .services import CommandBuilderService
from .services import SchemaCompilerService
from .services import SchemasProviderService
class Kernel(object):
def __init__(self, ffmpeg_home_dir):
self.ffmpeg_home = ffmpeg_home_dir
self.command_builder_service = CommandBuilderService()
self.schema_compiler_service = SchemaCompilerService()
self.schemas_provider_service = SchemasProviderService()
self.options_stack = []
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
elif self.has_option(name):
return functools.partial(self.option, name)
else:
raise AttributeError, name
def bootstrap(self):
self.command_builder_service.bootstrap()
self.schema_compiler_service.bootstrap()
self.schemas_provider_service.bootstrap()
def has_option(self, option_name):
return bool(self.schemas_provider_service.schema(option_name))
def option(self, name, **kwargs):
schema = self.schemas_provider_service.schema(name)
if not schema:
return None
else:
compiled_option = self.schema_compiler_service.compile(schema, kwargs)
self.options_stack.append(compiled_option)
return self
def execute(self):
command_parts = map(lambda o: o.split(' '), self.options_stack)
command_parts = reduce(lambda x, y: x + y, command_parts)
self.command_builder_service.push(self.ffmpeg_home)
for part in command_parts:
self.command_builder_service.push(part)
print self.command_builder_service.command_parts_stack
self.command_builder_service.run()
self.options_stack = []
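# A hypothetical usage sketch of the fluent option API (the option names depend
# on the schemas loaded by SchemasProviderService; 'input'/'output' are placeholders):
#   kernel = Kernel('/usr/bin/ffmpeg')
#   kernel.bootstrap()
#   kernel.option('input', path='in.wav').option('output', path='out.wav').execute()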
| bsd-2-clause | 2,849,927,385,072,746,000 | 40.44186 | 82 | 0.656566 | false | 4.183099 | false | false | false |
calancha/DIRAC | RequestManagementSystem/test/FileTests.py | 4 | 6001 | ########################################################################
# $HeadURL$
# File: FileTest.py
# Author: [email protected]
# Date: 2012/08/06 13:48:54
########################################################################
""" :mod: FileTest
=======================
.. module: FileTest
:synopsis: test cases for Files
.. moduleauthor:: [email protected]
test cases for Files
"""
__RCSID__ = "$Id$"
# #
# @file FileTest.py
# @author [email protected]
# @date 2012/08/06 13:49:05
# @brief Definition of FileTest class.
# # imports
import unittest
# # from DIRAC
from DIRAC.RequestManagementSystem.Client.Operation import Operation
# # SUT
from DIRAC.RequestManagementSystem.Client.File import File
########################################################################
class FileTests( unittest.TestCase ):
"""
.. class:: FileTest
"""
def setUp( self ):
""" test setup """
self.fromDict = { "Size" : 1, "LFN" : "/test/lfn", "ChecksumType" : "ADLER32", "Checksum" : "123456", "Status" : "Waiting" }
def tearDown( self ):
""" test tear down """
del self.fromDict
def test01ctors( self ):
""" File construction and (de)serialisation """
# # empty default ctor
theFile = File()
self.assertEqual( isinstance( theFile, File ), True )
# # fromDict
try:
theFile = File( self.fromDict )
except AttributeError, error:
print "AttributeError: %s" % str( error )
self.assertEqual( isinstance( theFile, File ), True )
for key, value in self.fromDict.items():
self.assertEqual( getattr( theFile, key ), value )
toJSON = theFile.toJSON()
self.assertEqual( toJSON["OK"], True, "JSON serialization error" )
def test02props( self ):
""" test props and attributes """
theFile = File()
# valid props
theFile.FileID = 1
self.assertEqual( theFile.FileID, 1 )
theFile.Status = "Done"
self.assertEqual( theFile.Status, "Done" )
theFile.LFN = "/some/path/somewhere"
self.assertEqual( theFile.LFN, "/some/path/somewhere" )
theFile.PFN = "/some/path/somewhere"
self.assertEqual( theFile.PFN, "/some/path/somewhere" )
theFile.Attempt = 1
self.assertEqual( theFile.Attempt, 1 )
theFile.Size = 1
self.assertEqual( theFile.Size, 1 )
theFile.GUID = "2bbabe80-e2f1-11e1-9b23-0800200c9a66"
self.assertEqual( theFile.GUID, "2bbabe80-e2f1-11e1-9b23-0800200c9a66" )
theFile.ChecksumType = "adler32"
self.assertEqual( theFile.ChecksumType, "ADLER32" )
theFile.Checksum = "123456"
self.assertEqual( theFile.Checksum, "123456" )
# #
theFile.Checksum = None
theFile.ChecksumType = None
self.assertEqual( theFile.Checksum, "" )
self.assertEqual( theFile.ChecksumType, "" )
# # invalid props
# FileID
try:
theFile.FileID = "foo"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
# parent
parent = Operation( { "OperationID" : 99999 } )
parent += theFile
theFile.FileID = 0
self.assertEqual( parent.OperationID, theFile.OperationID )
try:
theFile.OperationID = 111111
except Exception, error:
self.assertEqual( isinstance( error, AttributeError ), True )
self.assertEqual( str( error ), "can't set attribute" )
# LFN
try:
theFile.LFN = 1
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "LFN has to be a string!" )
try:
theFile.LFN = "../some/path"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "LFN should be an absolute path!" )
# PFN
try:
theFile.PFN = 1
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "PFN has to be a string!" )
try:
theFile.PFN = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Wrongly formatted PFN!" )
# Size
try:
theFile.Size = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
try:
theFile.Size = -1
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Size should be a positive integer!" )
# GUID
try:
theFile.GUID = "snafuu-uuu-uuu-uuu-uuu-u"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "'snafuu-uuu-uuu-uuu-uuu-u' is not a valid GUID!" )
try:
theFile.GUID = 2233345
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "GUID should be a string!" )
# Attempt
try:
theFile.Attempt = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
try:
theFile.Attempt = -1
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Attempt should be a positive integer!" )
# Status
try:
theFile.Status = None
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Unknown Status: None!" )
# Error
try:
theFile.Error = Exception( "test" )
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "Error has to be a string!" )
# # test execution
if __name__ == "__main__":
testLoader = unittest.TestLoader()
fileTests = testLoader.loadTestsFromTestCase( FileTests )
suite = unittest.TestSuite( [ fileTests ] )
unittest.TextTestRunner( verbosity = 3 ).run( suite )
| gpl-3.0 | 7,929,290,984,637,835,000 | 29.461929 | 128 | 0.617897 | false | 3.71349 | true | false | false |
artwr/airflow | airflow/ti_deps/deps/not_in_retry_period_dep.py | 20 | 2164 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils import timezone
from airflow.utils.db import provide_session
from airflow.utils.state import State
class NotInRetryPeriodDep(BaseTIDep):
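    """
    Determines whether a task instance is outside its retry period, i.e. it is
    not in state UP_FOR_RETRY with a next-retry time still in the future.
    """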
NAME = "Not In Retry Period"
IGNOREABLE = True
IS_TASK_DEP = True
@provide_session
def _get_dep_statuses(self, ti, session, dep_context):
if dep_context.ignore_in_retry_period:
yield self._passing_status(
reason="The context specified that being in a retry period was "
"permitted.")
return
if ti.state != State.UP_FOR_RETRY:
yield self._passing_status(
reason="The task instance was not marked for retrying.")
return
# Calculate the date first so that it is always smaller than the timestamp used by
# ready_for_retry
cur_date = timezone.utcnow()
next_task_retry_date = ti.next_retry_datetime()
if ti.is_premature:
yield self._failing_status(
reason="Task is not ready for retry yet but will be retried "
"automatically. Current date is {0} and task will be retried "
"at {1}.".format(cur_date.isoformat(),
next_task_retry_date.isoformat()))
| apache-2.0 | 4,834,235,920,958,033,000 | 39.830189 | 90 | 0.660351 | false | 4.285149 | false | false | false |
chapman-phys227-2016s/cw-2-classwork-team | read_2columns.py | 1 | 1894 | #! /usr/bin/env python
"""
File: read_2columns.py
Copyright (c) 2016 Taylor Patti
License: MIT
This module plots the points of two lists of data, as well as printing the maximum and minimum values of the generated arrays.
"""
import numpy as np
import matplotlib.pyplot as plt
def data_prep1(filename='xy.dat'):
"""Processes files of two columns into two seperate, vectorized data sets."""
infile = open(filename)
x_data = []
y_data = []
for line in infile:
data = line.split()
x_data.append(float(data[0]))
y_data.append(float(data[1]))
x_data = np.array(x_data)
y_data = np.array(y_data)
return x_data, y_data
def data_prep2(filename='xy.dat'):
"""Processes files of two columns into two seperate, vectorized data sets."""
data = np.loadtxt(filename, dtype=np.float)
return data[:,0], data[:,1]
def data_plot_display(xinfo=data_prep1()[0], yinfo=data_prep1()[1]):
"""Plots the data given and prints the max and mean of both data sets."""
print 'Max x: ' + str(np.amax(xinfo))
print 'Max y: ' + str(np.amax(yinfo))
print 'Mean x: ' + str(np.mean(xinfo))
print 'Mean y: ' + str(np.mean(yinfo))
plt.plot(xinfo, yinfo, 'bo')
plt.title('Unspecified Plot Data')
plt.xlabel('x-axis data')
plt.ylabel('y-axis data')
plt.show()
def test_other_file1(filename='testerfile.dat'):
"""Uses a specified testfile to ensure that the indexes were correct."""
test_run = data_prep1(filename)
apt = (test_run[0][2] == 2) and (test_run[1][2] == 2)
msg = 'Values indexed incorrectly.'
    assert apt, msg
def test_other_file2(filename='testerfile.dat'):
"""Uses a specified testfile to ensure that the indexes were correct."""
test_run = data_prep2(filename)
apt = (test_run[0][2] == 2) and (test_run[1][2] == 2)
msg = 'Values indexed incorrectly.'
    assert apt, msg
| mit | 8,645,625,074,339,591,000 | 31.101695 | 126 | 0.643611 | false | 3.167224 | true | false | false |
Letractively/timeside | timeside/encoder/flac.py | 2 | 2350 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2009 Guillaume Pellerin <[email protected]>
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Author: Guillaume Pellerin <[email protected]>
from timeside.core import Processor, implements, interfacedoc
from timeside.encoder.core import GstEncoder
from timeside.api import IEncoder
from timeside.tools import *
class FlacEncoder(GstEncoder):
""" gstreamer-based FLAC encoder """
implements(IEncoder)
@interfacedoc
def setup(self, channels=None, samplerate=None, blocksize=None, totalframes=None):
super(FlacEncoder, self).setup(channels, samplerate, blocksize, totalframes)
self.pipe = ''' appsrc name=src ! audioconvert
! flacenc '''
if self.filename and self.streaming:
self.pipe += ''' ! tee name=t
! queue ! filesink location=%s
t. ! queue ! appsink name=app sync=False
''' % self.filename
elif self.filename :
self.pipe += '! filesink location=%s async=False sync=False ' % self.filename
else:
self.pipe += '! queue ! appsink name=app sync=False '
self.start_pipeline(channels, samplerate)
@staticmethod
@interfacedoc
def id():
return "gst_flac_enc"
@staticmethod
@interfacedoc
def description():
return "FLAC GStreamer based encoder"
@staticmethod
@interfacedoc
def format():
return "FLAC"
@staticmethod
@interfacedoc
def file_extension():
return "flac"
@staticmethod
@interfacedoc
def mime_type():
return 'audio/x-flac'
@interfacedoc
def set_metadata(self, metadata):
self.metadata = metadata
| gpl-2.0 | 4,481,031,754,363,007,000 | 28.375 | 89 | 0.66766 | false | 3.923205 | false | false | false |
mozilla-it/yammer | python/Yammer-ReadThread.py | 1 | 3150 | import time
import yampy
#Secrets scrubbed
client_id = ""
client_secret = ""
redirect_uri = ""
code = ""
access_token = ""
newestMsgID = 0
currentID = 0
MAX_MSG_LIMIT = 10000000000
currentPgLowerMsgID = MAX_MSG_LIMIT
users = dict()
moreToProcess = True
MAX_MSG_PER_PAGE = 20
restCall = 0 # Keep track of how many times we make web calls due to API limits / throttling
MAX_REQ_PER_INTERVAL = 10 # How many Yammer requests you can make in 30 seconds. Once reached, wait 30 seconds.
# Various Yammer threads for testing
GMAIL_THREAD = 414357831 # 268-ish
AURORA_THREAD = 387871026 # 12 messages
PASTEBIN_THREAD = 421373941 # Exactly 20 messages (as of 27-JUL-2014)
# Setup authenticator - Don't delete any of this! You'll need it when the access token expires
authenticator = yampy.Authenticator(client_id, client_secret)
#auth_url = authenticator.authorization_url(redirect_uri)
#print(auth_url) #Debug: show the code to stdout
#access_token = authenticator.fetch_access_token(code)
#print(access_token)
#Get your Yammer object for making requests
yammer = yampy.Yammer(access_token)
# Create a dictionary from the Yammer messages.
# The RESTful API to the "messages" endpoint will result in one response with two blocks of structures within:
# 1. messages: the actual posts/replies/polls within the message thread
# 2. references: usually users.
# Start by grabbing the latest replies in the thread and walk backwards from there using message IDs.
# The first request uses a sentinel older_than value above any real ID, so it returns the newest page and sets newestMsgID.
while moreToProcess:
# Be respectful of Yammer API limits; else we get throttled / banned.
restCall += 1
if restCall % MAX_REQ_PER_INTERVAL == 0:
        time.sleep(31) # Pause for a little more than 30 seconds every MAX_REQ_PER_INTERVAL requests
# Grab the latest set of messages in the thread and set newestMsgID
yammerMessages = dict(yammer.messages.in_thread(GMAIL_THREAD, older_than=currentPgLowerMsgID))
    # Read the latest set of messages and the users who posted them
# Users: Load up the id:full_name key/value pair dictionary now
for user in yammerMessages.get("references"):
users[user.get("id")] = user.get("full_name") # The format here is dictionary[key]=value
# Messages:
for message in yammerMessages.get("messages"):
# Note: in the messages context, sender_id is the same integer as "id" in the references context.
print(users[message.get("sender_id")], ":", sep='') # Don't add space between user and colon character
#Get the currentID, and set newestMsgID
currentID = message.get("id")
if currentID > newestMsgID:
newestMsgID = currentID
#Set the current page's lowest ID to the current ID.
currentPgLowerMsgID = currentID
print("ID:", currentID)
print("newestMsgID:", newestMsgID)
print(message.get("body").get("plain"))
print(
"=========================================================================================================")
if len(yammerMessages.get("messages")) < MAX_MSG_PER_PAGE:
moreToProcess = False
| mpl-2.0 | -7,432,924,009,419,899,000 | 36.951807 | 120 | 0.685079 | false | 3.645833 | false | false | false |
AdamGagorik/pyrename | pyrename/apps/main.py | 1 | 4719 | """
Rename files or directories using regular expression.
Does nothing without the --force option.
example:
./pyrename.py '(.*)\.py' '\g<1>_renamed.py'
"""
import logging
import os
import re
from .. import logutils
from .. import options
from .. import utils
logutils.setup_logging()
def main(args=None):
work = os.getcwd()
opts = options.get_arguments(work, args)
# check top level directory
if not os.path.exists(opts.top) or not os.path.isdir(opts.top):
logging.error('invalid top level directory: %s' % opts.top)
raise SystemExit(1)
# compile regex
if opts.ignorecase:
regex1 = re.compile(opts.pattern, re.IGNORECASE)
try:
regex2 = re.compile(opts.nomatch, re.IGNORECASE)
except TypeError:
regex2 = None
else:
regex1 = re.compile(opts.pattern)
try:
regex2 = re.compile(opts.nomatch)
except TypeError:
regex2 = None
# compile replace
if opts.func:
opts.replace = eval('lambda x : {}'.format(opts.replace))
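    # Example (hypothetical invocation): with --func, the replacement string is
    # evaluated as a lambda body over the match object x, e.g. passing
    # 'x.group(1).upper()' upper-cases the first capture group.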
# record errors
error = False
# find paths
opaths = []
npaths = []
for root, p in utils.walk(opts.top, r=opts.recursive, dirs=opts.dirs, files=opts.files):
match = regex1.match(p)
if match:
# exclude list
if p in opts.exclude:
logging.info('path excluded!\n\n\t%s\n', os.path.join(root, p))
continue
# exclude nomatch
if not regex2 is None and regex2.match(p):
logging.info('path excluded!\n\n\t%s\n', os.path.join(root, p))
continue
# construct new base
try:
n = regex1.sub(opts.replace, p)
except re.error:
logging.exception('regex error')
error = True
n = p
# construct paths
opath = os.path.join(root, p)
npath = os.path.join(root, n)
opaths.append((root, p, opath))
npaths.append((root, n, npath))
# output match
logging.info('found a match!\n\n\topath (%d): %s\n\tnpath (%d): %s\n',
os.path.exists(opath), opath, os.path.exists(npath), npath)
    # describe paths
oset = set(opaths)
nset = set(npaths)
iset = oset.intersection(nset)
logging.info('%d old', len(opaths))
logging.info('%d old (unique)', len(oset))
logging.info('%d new', len(npaths))
logging.info('%d new (unique)', len(nset))
logging.info('%d same', len(iset))
# make sure paths were found
try:
assert opaths
except AssertionError:
logging.error('no old paths found')
error = True
# make sure paths were found
try:
assert npaths
except AssertionError:
logging.error('no new paths found')
error = True
# make sure old paths are unique
try:
        assert len(oset) == len(opaths)  # '==', not 'is': identity is unreliable for large ints
except AssertionError:
logging.error('old paths are not unique')
error = True
# make sure new paths are unique
try:
        assert len(nset) == len(npaths)
except AssertionError:
logging.error('new paths are not unique')
error = True
# make sure old paths and new paths do not intersect
try:
assert not iset
except AssertionError:
logging.error('some paths are the same')
error = True
# check if old paths exist
found = []
for root, base, path in opaths:
try:
assert os.path.exists(path)
except AssertionError:
found.append(path)
if found:
logging.error('some old paths do not exist\n\n\t%s\n',
'\n\t'.join(found))
error = True
# check if new paths exist
found = []
for root, base, path in npaths:
try:
assert not os.path.exists(path)
except AssertionError:
found.append(path)
if found:
logging.error('some new paths already exist\n\n\t%s\n',
'\n\t'.join(found))
error = True
# stop if there were errors
if error:
logging.error('invalid configuration')
raise SystemExit(1)
# move files
if opts.force:
logging.info('moving paths!')
for (oroot, obase, opath), (nroot, nbase, npath) in zip(opaths, npaths):
utils.move(opath, npath, git=opts.git)
else:
logging.info('\n\n\tThis was a dry run, please use --force to perform renaming\n')
if __name__ == '__main__':
try:
main()
except SystemExit:
pass
except:
logging.exception('caught unhandled exception') | mit | 1,729,102,449,597,880,800 | 26.283237 | 92 | 0.568341 | false | 3.824149 | false | false | false |
zaihui/wechatpy | wechatpy/client/api/message.py | 1 | 21520 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import re
import six
from optionaldict import optionaldict
from wechatpy.client.api.base import BaseWeChatAPI
from wechatpy.utils import random_string
class WeChatMessage(BaseWeChatAPI):
OPENID_RE = re.compile(r'^[\w\-]{28}$', re.I)
def _send_custom_message(self, data, account=None):
data = data or {}
if account:
data['customservice'] = {'kf_account': account}
return self._post(
'message/custom/send',
data=data
)
def send_text(self, user_id, content, account=None):
"""
        Send a text message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param content: message body
        :param account: optional, customer service account
        :return: the returned JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.send_text('openid', 'text')
"""
data = {
'touser': user_id,
'msgtype': 'text',
'text': {'content': content}
}
return self._send_custom_message(data, account=account)
def send_image(self, user_id, media_id, account=None):
"""
        Send an image message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param media_id: media ID of the image; it can be uploaded with :func:`upload_media`
        :param account: optional, customer service account
        :return: the returned JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.send_image('openid', 'media_id')
"""
data = {
'touser': user_id,
'msgtype': 'image',
'image': {
'media_id': media_id
}
}
return self._send_custom_message(data, account=account)
def send_voice(self, user_id, media_id, account=None):
"""
        Send a voice message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param media_id: media ID of the voice clip; it can be uploaded with :func:`upload_media`
        :param account: optional, customer service account
        :return: the returned JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.send_voice('openid', 'media_id')
"""
data = {
'touser': user_id,
'msgtype': 'voice',
'voice': {
'media_id': media_id
}
}
return self._send_custom_message(data, account=account)
def send_video(self, user_id, media_id, title=None,
description=None, account=None):
"""
        Send a video message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param media_id: media ID of the video; it can be uploaded with :func:`upload_media`
        :param title: title of the video message
        :param description: description of the video message
        :param account: optional, customer service account
        :return: the returned JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.send_video('openid', 'media_id', 'title', 'description')
"""
video_data = {
'media_id': media_id,
}
if title:
video_data['title'] = title
if description:
video_data['description'] = description
data = {
'touser': user_id,
'msgtype': 'video',
'video': video_data
}
return self._send_custom_message(data, account=account)
def send_music(self, user_id, url, hq_url, thumb_media_id,
title=None, description=None, account=None):
"""
        Send a music message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param url: music link
        :param hq_url: high-quality music link, used preferentially for playback on wifi
        :param thumb_media_id: media ID of the thumbnail; it can be uploaded with :func:`upload_media`
        :param title: music title
        :param description: music description
        :param account: optional, customer service account
        :return: the returned JSON data
"""
music_data = {
'musicurl': url,
'hqmusicurl': hq_url,
'thumb_media_id': thumb_media_id
}
if title:
music_data['title'] = title
if description:
music_data['description'] = description
data = {
'touser': user_id,
'msgtype': 'music',
'music': music_data
}
return self._send_custom_message(data, account=account)
def send_articles(self, user_id, articles, account=None):
"""
        Send an articles (news) message
        For details, see
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param articles: a list of at most 10 articles, or the media_id of WeChat article material
        :param account: optional, customer service account
        :return: the returned JSON data
"""
if isinstance(articles, (tuple, list)):
articles_data = []
for article in articles:
articles_data.append({
'title': article['title'],
'description': article['description'],
'url': article['url'],
'picurl': article.get('image', article.get('picurl')),
})
data = {
'touser': user_id,
'msgtype': 'news',
'news': {
'articles': articles_data
}
}
else:
data = {
'touser': user_id,
'msgtype': 'mpnews',
'mpnews': {
'media_id': articles,
}
}
return self._send_custom_message(data, account=account)
def send_card(self, user_id, card_id, card_ext, account=None):
"""
        Send a card (coupon) message
        For details, see
        http://mp.weixin.qq.com/wiki/1/70a29afed17f56d537c833f89be979c9.html
        :param user_id: user ID, i.e. the source of the `Message` you received
        :param card_id: card ID
        :param card_ext: card extension information
        :param account: optional, customer service account
        :return: the returned JSON data
"""
data = {
'touser': user_id,
'msgtype': 'wxcard',
'wxcard': {
'card_id': card_id,
'card_ext': card_ext
}
}
return self._send_custom_message(data, account=account)
def delete_mass(self, msg_id):
"""
        Delete a mass-sent message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param msg_id: ID of the mass message to delete
        :return: the returned JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.delete_mass('message id')
"""
return self._post(
'message/mass/delete',
data={
'msg_id': msg_id
}
)
def _send_mass_message(self, group_or_users, msg_type, msg,
is_to_all=False, preview=False):
data = {
'msgtype': msg_type
}
if not preview:
if isinstance(group_or_users, (tuple, list)):
# send by user ids
data['touser'] = group_or_users
endpoint = 'message/mass/send'
else:
# send by group id
data['filter'] = {
'group_id': group_or_users,
'is_to_all': is_to_all,
}
endpoint = 'message/mass/sendall'
else:
if not isinstance(group_or_users, six.string_types):
raise ValueError('group_or_users should be string types')
            # preview endpoint
            if self.OPENID_RE.match(group_or_users):
                # preview the mass message by openid
                data['touser'] = group_or_users
            else:
                # preview the mass message by WeChat account name (wxname)
                data['towxname'] = group_or_users
endpoint = 'message/mass/preview'
data.update(msg)
return self._post(
endpoint,
data=data
)
def send_mass_text(self, group_or_users, content,
is_to_all=False, preview=False):
"""
        Send a mass text message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer sends by group ID; a list/tuple sends to the given list of OpenIDs
                               when is_to_all is True, pass None to send to all users
        :param content: message body
        :param is_to_all: whether to send to all users: if True, the message is sent to every user;
                          if False, it is sent to the group given by group_id
        :type is_to_all: bool
        :param preview: whether to send a preview; in that case group_or_users should be a single openid string
        :type preview: bool
        :return: the returned JSON data
"""
return self._send_mass_message(
group_or_users,
'text',
{
'text': {
'content': content
}
},
is_to_all,
preview
)
def send_mass_image(self, group_or_users, media_id,
is_to_all=False, preview=False):
"""
        Send a mass image message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer sends by group ID; a list/tuple sends to the given list of OpenIDs
                               when is_to_all is True, pass None to send to all users
        :param media_id: media ID of the image; it can be uploaded with :func:`upload_media`
        :param is_to_all: whether to send to all users: if True, the message is sent to every user;
                          if False, it is sent to the group given by group_id
        :type is_to_all: bool
        :param preview: whether to send a preview; in that case group_or_users should be a single openid string
        :type preview: bool
        :return: the returned JSON data
"""
return self._send_mass_message(
group_or_users,
'image',
{
'image': {
'media_id': media_id
}
},
is_to_all,
preview
)
def send_mass_voice(self, group_or_users, media_id,
is_to_all=False, preview=False):
"""
        Send a mass voice message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer value sends by group ID, while a
            list/tuple sends to that list of OpenIDs.
            When is_to_all is True, pass None to send to every user.
        :param media_id: Media ID of the voice clip; it can be uploaded with
            :func:`upload_media`.
        :param is_to_all: Whether to send to all users; True broadcasts the
            message to every user, False sends to the group given by group_id
        :type is_to_all: bool
        :param preview: Whether to send a preview; group_or_users should then
            be a single openid string
        :type preview: bool
        :return: Response JSON data
"""
return self._send_mass_message(
group_or_users,
'voice',
{
'voice': {
'media_id': media_id
}
},
is_to_all,
preview
)
def send_mass_video(self, group_or_users, media_id, title=None,
description=None, is_to_all=False, preview=False):
"""
        Send a mass video message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer value sends by group ID, while a
            list/tuple sends to that list of OpenIDs.
            When is_to_all is True, pass None to send to every user.
        :param media_id: Media ID of the video; it can be uploaded with
            :func:`upload_video`.
        :param title: Video title
        :param description: Video description
        :param is_to_all: Whether to send to all users; True broadcasts the
            message to every user, False sends to the group given by group_id
        :type is_to_all: bool
        :param preview: Whether to send a preview; group_or_users should then
            be a single openid string
        :type preview: bool
        :return: Response JSON data
"""
video_data = {
'media_id': media_id
}
if title:
video_data['title'] = title
if description:
video_data['description'] = description
return self._send_mass_message(
group_or_users,
'mpvideo',
{
'mpvideo': video_data
},
is_to_all,
preview
)
def send_mass_article(self, group_or_users, media_id,
is_to_all=False, preview=False):
"""
        Send a mass articles (news) message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer value sends by group ID, while a
            list/tuple sends to that list of OpenIDs.
            When is_to_all is True, pass None to send to every user.
        :param media_id: Media ID of the news material; it can be uploaded
            with :func:`upload_articles`.
        :param is_to_all: Whether to send to all users; True broadcasts the
            message to every user, False sends to the group given by group_id
        :type is_to_all: bool
        :param preview: Whether to send a preview; group_or_users should then
            be a single openid string
        :type preview: bool
        :return: Response JSON data
"""
return self._send_mass_message(
group_or_users,
'mpnews',
{
'mpnews': {
'media_id': media_id
}
},
is_to_all,
preview
)
def get_mass(self, msg_id):
"""
        Query the delivery status of a mass message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param msg_id: The message id returned after sending the mass message
        :return: Response JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
res = client.message.get_mass('mass message id')
"""
return self._post(
'message/mass/get',
data={
'msg_id': msg_id
}
)
def send_template(self, user_id, template_id, data, url=None, mini_program=None):
"""
        Send a template message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1445241432&lang=zh_CN
        :param user_id: User ID, i.e. the source of the `Message` you received
        :param template_id: Template ID, obtained by picking a template from
            the online template library in the MP admin console
        :param url: Link URL
        :param data: Template message data
        :param mini_program: Data needed to jump to a mini program, e.g.
            `{'appid': 'appid', 'pagepath': 'index?foo=bar'}`
        :return: Response JSON data
"""
tpl_data = optionaldict(
touser=user_id,
template_id=template_id,
url=url,
miniprogram=mini_program,
data=data,
)
return self._post(
'message/template/send',
data=tpl_data
)
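    # Data sketch (hypothetical template fields): each field maps to a dict
    # with a value and an optional display color.
    #
    #   client.message.send_template('openid', 'template id', {
    #       'first': {'value': 'Order received', 'color': '#173177'},
    #       'remark': {'value': 'Thank you!'},
    #   }, url='http://example.com/order/1')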
def get_autoreply_info(self):
"""
        Get the current auto-reply rules
        For details, see
        http://mp.weixin.qq.com/wiki/7/7b5789bb1262fb866d01b4b40b0efecb.html
        :return: Response JSON data
        Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
info = client.message.get_autoreply_info()
"""
return self._get('get_current_autoreply_info')
def send_mass_card(self, group_or_users, card_id,
is_to_all=False, preview=False):
"""
        Send a mass card (coupon) message
        For details, see
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
        :param group_or_users: an integer value sends by group ID, while a
            list/tuple sends to that list of OpenIDs.
            When is_to_all is True, pass None to send to every user.
        :param card_id: Card ID
        :param is_to_all: Whether to send to all users; True broadcasts the
            message to every user, False sends to the group given by group_id
        :type is_to_all: bool
        :param preview: Whether to send a preview; group_or_users should then
            be a single openid string
        :type preview: bool
        :return: Response JSON data
"""
return self._send_mass_message(
group_or_users,
'wxcard',
{
'wxcard': {
'card_id': card_id
}
},
is_to_all,
preview
)
def get_subscribe_authorize_url(self, scene, template_id, redirect_url, reserved=None):
"""
        Build the URL that asks the user for subscribe-message authorization
        For details, see:
        https://mp.weixin.qq.com/wiki?id=mp1500374289_66bvB
        :param scene: Subscription scene value; an integer in 0-10000 chosen
            by the developer to identify the subscription scene
        :type scene: int
        :param template_id: Subscribe message template ID; visible in the API
            permission list of the MP admin console
        :param redirect_url: Callback URL to redirect to after authorization
        :param reserved: Opaque state kept across the request and returned
            unchanged to the third party after authorization; it can be used
            to prevent CSRF attacks. A random string is generated if omitted.
"""
if reserved is None:
reserved = random_string()
base_url = 'https://mp.weixin.qq.com/mp/subscribemsg'
params = [
('action', 'get_confirm'),
('appid', self.appid),
('scene', scene),
('template_id', template_id),
('redirect_url', redirect_url),
('reserved', reserved),
]
encoded_params = six.moves.urllib.parse.urlencode(params)
url = '{base}?{params}#wechat_redirect'.format(base=base_url, params=encoded_params)
return url
def send_subscribe_template(self, openid, template_id, scene, title, data, url=None):
"""
        One-time subscribe message: push a subscribe template message to an
        authorized WeChat user through the API.
        For details, see:
        https://mp.weixin.qq.com/wiki?id=mp1500374289_66bvB
        :param openid: openid of the user receiving the message
        :param template_id: Subscribe message template ID
        :param scene: Subscription scene value; an integer in 0-10000 chosen
            by the developer to identify the subscription scene
        :type scene: int
        :param title: Message title, at most 15 characters
        :param data: Message body; for each field, value is the content and
            color is the color, at most 200 characters
        :type data: dict
        :param url: Link opened when the message is clicked; the target must
            have an ICP record
"""
        post_data = {
            'touser': openid,
            'template_id': template_id,
            'scene': scene,
            'title': title,
            'data': data,
        }
        # only include the link when one was given
        if url is not None:
            post_data['url'] = url
return self._post(
'message/template/subscribe',
data=post_data,
)
| mit | 4,983,095,428,572,696,000 | 28.468599 | 92 | 0.512131 | false | 2.683284 | false | false | false |
PetrPPetrov/beautiful-capi | source/OverloadSuffixes.py | 1 | 4136 | #!/usr/bin/env python
#
# Beautiful Capi generates beautiful C API wrappers for your C++ classes
# Copyright (C) 2015 Petr Petrovich Petrov
#
# This file is part of Beautiful Capi.
#
# Beautiful Capi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Beautiful Capi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Beautiful Capi. If not, see <http://www.gnu.org/licenses/>.
#
from Parser import TClass, TNamespace, TBeautifulCapiRoot, TOverloadSuffixMode
from Helpers import get_full_method_name
class OverloadSuffixesProcessor(object):
def __init__(self, root_node: TBeautifulCapiRoot):
self.root_node = root_node
self.cur_overload_suffix_mode = TOverloadSuffixMode.Off
self.namespace_stack = []
class ParamsScope(object):
def __init__(self, overload_suffixes_processor, namespace_or_class: TNamespace or TClass):
self.overload_suffixes_processor = overload_suffixes_processor
self.old_overload_suffix_mode = self.overload_suffixes_processor.cur_overload_suffix_mode
if namespace_or_class.overload_suffix_mode_filled:
self.overload_suffixes_processor.cur_overload_suffix_mode = namespace_or_class.overload_suffix_mode
self.overload_suffixes_processor.namespace_stack.append(namespace_or_class.name)
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
self.overload_suffixes_processor.namespace_stack.pop()
self.overload_suffixes_processor.cur_overload_suffix_mode = self.old_overload_suffix_mode
def __check_for_unique(self, routines: [object]):
if self.cur_overload_suffix_mode != TOverloadSuffixMode.Off:
existing_names = {}
for routine in routines:
name = ''.join(get_full_method_name(routine))
if name in existing_names:
existing_names[name] += 1
old_suffix = routine.overload_suffix
routine.overload_suffix += str(existing_names[name])
if self.cur_overload_suffix_mode == TOverloadSuffixMode.Notify:
print(
'Warning: Method or function {routine_name}() is overloaded'
' and has no unique overload suffix ("{old_suffix}"). '
'Suffix "{suffix}" has been installed.'.format(
routine_name='::'.join(self.namespace_stack) + '::' + routine.name,
old_suffix=old_suffix,
suffix=routine.overload_suffix))
else:
existing_names[name] = 0
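    # Example: three routines whose full name resolves to "Foo" keep the first
    # one untouched and get "1" and "2" appended to the overload_suffix of the
    # two duplicates (with a warning for each in TOverloadSuffixMode.Notify).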
def __process_class(self, cur_class: TClass):
with OverloadSuffixesProcessor.ParamsScope(self, cur_class):
self.__check_for_unique(cur_class.methods)
self.__check_for_unique(cur_class.indexers)
def __process_namespace(self, namespace: TNamespace):
with OverloadSuffixesProcessor.ParamsScope(self, namespace):
self.__check_for_unique(namespace.functions)
for nested_namespace in namespace.namespaces:
self.__process_namespace(nested_namespace)
for cur_class in namespace.classes:
self.__process_class(cur_class)
def process(self):
for cur_namespace in self.root_node.namespaces:
self.cur_overload_suffix_mode = cur_namespace.overload_suffix_mode
self.__process_namespace(cur_namespace)
def process(root_node: TBeautifulCapiRoot):
suffixes_processor = OverloadSuffixesProcessor(root_node)
suffixes_processor.process()
| gpl-3.0 | -301,367,994,317,084,200 | 45.47191 | 115 | 0.646518 | false | 4.123629 | false | false | false |
ocontant/django_test | garderie/models.py | 1 | 2668 | from django.db import models
from django.utils.timezone import now
from datetime import date, datetime
#from django.core.validators import MaxValueValidator, MinValueValidator
# Create your models here.
class Children(models.Model):
#Children_ID = models.AutoField(primary_key=True)
Parents = models.ManyToManyField('Parent')
FirstName = models.CharField(max_length=40)
LastName = models.CharField(max_length=40)
Birthday = models.DateField()
RAMQ = models.CharField(max_length=14, blank=True, null=True)
RAMQ_Expiration = models.DateField(blank=True, null=True)
# def save(self, *args, **kwargs):
# if self.FullName is None:
# self.FullName = self.LastName+" "+self.FirstName
# super(Children, self).save(*args, **kwargs)
def __str__(self):
return self.FullName
def list_parents(self):
return ", ".join([parent.FullName for parent in self.Parents.all()])
@property
def children_age(self):
return int((date.today() - self.Birthday).days / 365.25 )
@property
def FullName(self):
return self.LastName+" "+self.FirstName
class Parent(models.Model):
#Parent_ID = models.AutoField(primary_key=True)
FirstName = models.CharField(max_length=40)
LastName = models.CharField(max_length=40)
Email = models.EmailField()
Phone = models.CharField(max_length=15)
Phone_emergency = models.CharField(max_length=15)
SIN = models.CharField(max_length=11)
def __str__(self):
return self.LastName + " " + self.FirstName
    def list_childrens(self):
        return ", ".join([children.FullName for children in self.children_set.all()])
@property
def FullName(self):
return self.LastName+" "+self.FirstName
class Educator(models.Model):
    #Educator_ID = models.AutoField(primary_key=True)
    FirstName = models.CharField(max_length=40)
    LastName = models.CharField(max_length=40)
    Email = models.EmailField()
    Phone = models.CharField(max_length=15)
    Phone_emergency = models.CharField(max_length=15)
    def __str__(self):
        return "{} {}".format(self.FullName, self.list_classes())
    def list_classes(self):
        # the M2M is declared on Classe, so use Django's reverse accessor
        return ", ".join([classe.Name for classe in self.classe_set.all()])
    @property
    def FullName(self):
        return self.LastName + " " + self.FirstName
class Classe(models.Model):
#Classe_ID = models.AutoField(primary_key=True)
Educators = models.ManyToManyField(Educator)
Name = models.CharField(max_length=255)
def __str__(self):
return "{} {}".format(self.Name, self.list_educators())
    def list_educators(self):
        return ", ".join([educator.FullName for educator in self.Educators.all()])
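# Usage sketch (hypothetical records; relies on Django's default reverse
# accessors for the two many-to-many relations):
#
#   mum = Parent.objects.create(FirstName="Ann", LastName="Smith",
#                               Email="[email protected]", Phone="555-0101",
#                               Phone_emergency="555-0102", SIN="123-456-789")
#   kid = Children.objects.create(FirstName="Bo", LastName="Smith",
#                                 Birthday=date(2020, 1, 1))
#   kid.Parents.add(mum)
#   kid.list_parents()    # "Smith Ann"
#   mum.list_childrens()  # "Smith Bo"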
| gpl-3.0 | -205,060,988,150,681,100 | 27.382979 | 84 | 0.682159 | false | 3.411765 | false | false | false |
quells/MarkdownToReport | MDToLatexTemplates.py | 1 | 2554 | #!/usr/bin/python
# -*- coding: utf-8 -*-
def BuildDocument(doctype):
if doctype == 'report':
doctype = 'article'
return '''\documentclass[11pt, oneside]{%s}
\usepackage[margin=1in]{geometry} \geometry{letterpaper}
\usepackage{setspace}
\usepackage[font=singlespacing,labelfont=bf]{caption}
\usepackage{indentfirst}
\usepackage{float}
\usepackage{booktabs}
\usepackage{amsmath, gensymb}
\usepackage{url}
\usepackage{graphicx}
\makeatletter
\def\maxwidth{0.6\columnwidth}
\makeatother
\let\Oldincludegraphics\includegraphics
\\renewcommand{\includegraphics}[1]{\Oldincludegraphics[width=\maxwidth]{#1}}
'''%(doctype)
def BuildTable(caption, alignments, headers, rows, label):
latex = '''\\begin{table}[H]
\\caption{%s}
\\centering
\\begin{tabular}%s
\\hline
%s\\\\
\\hline
'''%(caption, alignments, headers)
for r in rows:
if type(r) == type('s'):
latex += r
else:
latex += r[0] + ' & '.join(r[1:]) + '\\\\\n'
latex += '''\\hline
\\end{tabular}
\\label{%s}
\\end{table}'''%(label)
return latex
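# Usage sketch (hypothetical values): 'alignments' is raw tabular syntax
# including braces, and tuple rows are joined from index 1 with row[0] used
# as a literal prefix.
#
#   BuildTable('Results', '{lrr}', 'Name & A & B',
#              [('', 'x', '1', '2')], 'tab:results')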
def BuildFigure(file, caption, label):
return '''\\begin{figure}[H]
\\centering
\\includegraphics{%s}
\\caption{%s}
\\label{%s}
\\end{figure}'''%(file, caption, label)
def BuildTitle(title, subtitle):
latex = ''
if title != None:
latex += r'\title{%s'%(title)
if subtitle != None:
latex += r'\\\vspace{0.5em}{\large %s}'%(subtitle)
latex += '}\n'
return latex
def BuildAuthor(author):
latex = ''
if author != None:
latex += '\\author{%s}\n'%(author)
return latex
def BeginDocument(doctype):
latex = '\\date{}\n\n\\hyphenpenalty=100000\n\n\\begin{document}\n\n'
if doctype == 'report':
latex += '\\pagenumbering{gobble}\n'
latex += '\\maketitle\n'
return latex
def BuildInfoTable(Class, due, received):
latex = ''
if Class != None or due != None or received != None:
latex += '''\\begin{table}[H]
\\centering
\\begin{tabular}{l}
'''
if Class != None:
latex += '%s\\\\\n'%(Class)
if due != None:
latex += 'Due: %s\\\\\n'%(due)
if received != None:
latex += 'Received: %s\\\\\n'%(received)
latex += '''\end{tabular}
\end{table}
'''
return latex
def BuildThanks(thanks):
return '\\thanks{\\noindent %s\\\\\\\\}\n'%(thanks) if thanks != None else ''
def BuildAbstract(abstract):
return '\\begin{noindent}\n\\textbf{%s}\n\\end{noindent}\n\n'%(abstract) if abstract != None else ''
def BuildList(kind, items):
kind = 'enumerate' if kind == 'ordered' else 'itemize'
latex = '\\begin{%s}\n'%(kind)
for i in items:
latex += '\t\\item %s'%(i)
latex += '\\end{%s}\n'%(kind)
return latex | mit | 2,245,237,749,293,752,800 | 23.103774 | 101 | 0.639781 | false | 2.794311 | false | false | false |
hookehu/utility | max_rects_bin_pack.py | 1 | 22454 | #-*- coding:utf-8 -*-
import dircache, os, math, sys
from PIL import Image
from psd_tools import PSDImage
from psd_tools import Group
import json
class Rectangle:
x = 0
y = 0
width = 0
height = 0
offX = 0
offY = 0
origin_width = 0
origin_height = 0
arena = 0
def __init__(self):
self.name = ""
pass
def clone(self):
rst = Rectangle()
rst.name = self.name
rst.x = self.x
rst.y = self.y
rst.width = self.width
rst.height = self.height
rst.arena = self.arena
rst.offX = self.offX
rst.offY = self.offY
rst.origin_width = self.origin_width
rst.origin_height = self.origin_height
return rst
def to_dict(self):
rst = {}
rst['x'] = self.x
rst['y'] = self.y
rst['w'] = self.width
rst['h'] = self.height
rst['offX'] = self.offX
rst['offY'] = self.offY
rst['sourceW'] = self.origin_width
rst['sourceH'] = self.origin_height
return rst
class FreeRectangleChoiceHeuristic:
BestShortSideFit = 0 #< -BSSF: Positions the Rectangle against the short side of a free Rectangle into which it fits the best.
BestLongSideFit = 1 #< -BLSF: Positions the Rectangle against the long side of a free Rectangle into which it fits the best.
BestAreaFit = 2 #< -BAF: Positions the Rectangle into the smallest free Rectangle into which it fits.
BottomLeftRule = 3 #< -BL: Does the Tetris placement.
ContactPointRule = 4 #< -CP: Choosest the placement where the Rectangle touches other Rectangles as much as possible.
class MaxRectsBinPack:
binWidth = 0
binHeight = 0
allowRotations = False
usedRectangles = [] #new Vector.<Rectangle>();
freeRectangles = [] #new Vector.<Rectangle>();
score1 = 0
score2 = 0
bestShortSideFit = 0
bestLongSideFit = 0
def __init__(self, width, height, rotations = True):
self.init(width, height, rotations)
def init(self, width, height, rotations = True):
if( self.count(width) % 1 != 0 or self.count(height) % 1 != 0):
print "Must be 2,4,8,16,32,...512,1024,..."
return
self.binWidth = width
self.binHeight = height
self.allowRotations = rotations
n = Rectangle()
n.x = 0
n.y = 0
n.width = width
n.height = height
self.usedRectangles = []
self.freeRectangles = []
self.freeRectangles.append(n)
	def count(self, n):
		# use true division so a non power-of-two leaves a fractional part,
		# which the "% 1 != 0" check in init() can detect
		if( n >= 2 ):
			return self.count(n / 2.0)
		return n
def insert(self, rect, method):
width = rect.width
height = rect.height
name = rect.name
newNode = Rectangle()
score1 = 0
score2 = 0
if method == FreeRectangleChoiceHeuristic.BestShortSideFit:
newNode = self.findPositionForNewNodeBestShortSideFit(width, height)
elif method == FreeRectangleChoiceHeuristic.BottomLeftRule:
newNode = self.findPositionForNewNodeBottomLeft(width, height, score1, score2)
elif method == FreeRectangleChoiceHeuristic.ContactPointRule:
newNode = self.findPositionForNewNodeContactPoint(width, height, score1)
elif method == FreeRectangleChoiceHeuristic.BestLongSideFit:
newNode = self.findPositionForNewNodeBestLongSideFit(width, height, score2, score1)
elif method == FreeRectangleChoiceHeuristic.BestAreaFit:
newNode = self.findPositionForNewNodeBestAreaFit(width, height, score1, score2)
newNode.name = name
newNode.offX = rect.offX
newNode.offY = rect.offY
newNode.origin_width = rect.origin_width
newNode.origin_height = rect.origin_height
newNode.arena = rect.arena
if newNode.height == 0:
print "not posi for set"
return newNode
self.placeRectangle(newNode)
return newNode
def insert2(self, Rectangles, dst, method):
del dst[:] #dst.length = 0;
while(len(Rectangles) > 0):
bestScore1 = sys.maxint #int.MAX_VALUE
bestScore2 = sys.maxint #int.MAX_VALUE
bestRectangleIndex = -1
bestNode = Rectangle()
for i in range(len(Rectangles)):
score1 = 0
score2 = 0
newNode = self.scoreRectangle(Rectangles[i].width, Rectangles[i].height, method, score1, score2)
newNode.name = Rectangles[i].name
newNode.offX = Rectangles[i].offX
newNode.offY = Rectangles[i].offY
newNode.origin_width = Rectangles[i].origin_width
newNode.origin_height = Rectangles[i].origin_height
newNode.arena = Rectangles[i].arena
if score1 < bestScore1 or (score1 == bestScore1 and score2 < bestScore2):
bestScore1 = score1
bestScore2 = score2
bestNode = newNode
bestRectangleIndex = i
if (bestRectangleIndex == -1):
return
self.placeRectangle(bestNode)
del Rectangles[bestRectangleIndex] #Rectangles.splice(bestRectangleIndex,1)
def placeRectangle(self, node):
numRectanglesToProcess = len(self.freeRectangles)
i = 0
while i < numRectanglesToProcess:
if self.splitFreeNode(self.freeRectangles[i], node):
del self.freeRectangles[i] #freeRectangles.splice(i,1);
i = i - 1
numRectanglesToProcess = numRectanglesToProcess - 1
i = i + 1
self.pruneFreeList()
self.usedRectangles.append(node)
def scoreRectangle(self, width, height, method, score1, score2):
newNode = Rectangle()
self.score1 = sys.maxint #int.MAX_VALUE;
self.score2 = sys.maxint #int.MAX_VALUE;
if method == FreeRectangleChoiceHeuristic.BestShortSideFit:
newNode = self.findPositionForNewNodeBestShortSideFit(width, height)
elif method == FreeRectangleChoiceHeuristic.BottomLeftRule:
newNode = self.findPositionForNewNodeBottomLeft(width, height, self.score1, self.score2)
elif method == FreeRectangleChoiceHeuristic.ContactPointRule:
newNode = self.findPositionForNewNodeContactPoint(width, height, self.score1)
self.score1 = -self.score1;
elif method == FreeRectangleChoiceHeuristic.BestLongSideFit:
newNode = self.findPositionForNewNodeBestLongSideFit(width, height, self.score2, self.score1)
elif method == FreeRectangleChoiceHeuristic.BestAreaFit:
newNode = self.findPositionForNewNodeBestAreaFit(width, height, self.score1, self.score2)
#// Cannot fit the current Rectangle.
if newNode.height == 0:
self.score1 = sys.maxint #int.MAX_VALUE;
self.score2 = sys.maxint #int.MAX_VALUE;
print "not posi for set"
return newNode
#Computes the ratio of used surface area.
def occupancy(self):
usedSurfaceArea = 0
for rect in self.usedRectangles:
usedSurfaceArea = usedSurfaceArea + rect.width * rect.height;
return usedSurfaceArea / (self.binWidth * self.binHeight)
def findPositionForNewNodeBottomLeft(self, width, height, bestY, bestX):
bestNode = Rectangle()
bestY = sys.maxint;
topSideY = 0
for rect in self.freeRectangles:#(var i:int = 0; i < freeRectangles.length; i++) {
if rect.width >= width and rect.height >= height:
topSideY = rect.y + height
if topSideY < bestY or (topSideY == bestY and rect.x < bestX):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = width
bestNode.height = height
bestY = topSideY
bestX = rect.x
			if self.allowRotations and rect.width >= height and rect.height >= width:
topSideY = rect.y + width
if topSideY < bestY or (topSideY == bestY and rect.x < bestX):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = height
bestNode.height = width
bestY = topSideY
bestX = rect.x
return bestNode
def findPositionForNewNodeBestShortSideFit(self, width, height):
bestNode = Rectangle()
self.bestShortSideFit = sys.maxint #int.MAX_VALUE;
		self.bestLongSideFit = sys.maxint
leftoverHoriz = 0
leftoverVert = 0
shortSideFit = 0
longSideFit = 0
for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
if rect.width >= width and rect.height >= height:
leftoverHoriz = math.fabs(rect.width - width)
leftoverVert = math.fabs(rect.height - height)
shortSideFit = min(leftoverHoriz, leftoverVert)
longSideFit = max(leftoverHoriz, leftoverVert)
if shortSideFit < self.bestShortSideFit or (shortSideFit == self.bestShortSideFit and longSideFit < self.bestLongSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = width
bestNode.height = height
self.bestShortSideFit = shortSideFit
self.bestLongSideFit = longSideFit
flippedLeftoverHoriz = 0
flippedLeftoverVert = 0
flippedShortSideFit = 0
flippedLongSideFit = 0
if self.allowRotations and rect.width >= height and rect.height >= width:
flippedLeftoverHoriz = math.fabs(rect.width - height)
flippedLeftoverVert = math.fabs(rect.height - width)
flippedShortSideFit = min(flippedLeftoverHoriz, flippedLeftoverVert)
flippedLongSideFit = max(flippedLeftoverHoriz, flippedLeftoverVert)
				if flippedShortSideFit < self.bestShortSideFit or (flippedShortSideFit == self.bestShortSideFit and flippedLongSideFit < self.bestLongSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = height
bestNode.height = width
self.bestShortSideFit = flippedShortSideFit
self.bestLongSideFit = flippedLongSideFit
return bestNode
def findPositionForNewNodeBestLongSideFit(self, width, height, bestShortSideFit, bestLongSideFit):
bestNode = Rectangle()
self.bestLongSideFit = sys.maxint #int.MAX_VALUE;
leftoverHoriz = 0
leftoverVert = 0
shortSideFit = 0
longSideFit = 0
for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
if rect.width >= width and rect.height >= height:
leftoverHoriz = math.fabs(rect.width - width)
leftoverVert = math.fabs(rect.height - height)
shortSideFit = min(leftoverHoriz, leftoverVert)
longSideFit = max(leftoverHoriz, leftoverVert)
if longSideFit < self.bestLongSideFit or (longSideFit == self.bestLongSideFit and shortSideFit < self.bestShortSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = width
bestNode.height = height
self.bestShortSideFit = shortSideFit
self.bestLongSideFit = longSideFit
if self.allowRotations and rect.width >= height and rect.height >= width:
leftoverHoriz = math.fabs(rect.width - height)
leftoverVert = math.fabs(rect.height - width)
shortSideFit = min(leftoverHoriz, leftoverVert)
longSideFit = max(leftoverHoriz, leftoverVert)
if longSideFit < self.bestLongSideFit or (longSideFit == self.bestLongSideFit and shortSideFit < self.bestShortSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = height
bestNode.height = width
self.bestShortSideFit = shortSideFit
self.bestLongSideFit = longSideFit
return bestNode
def findPositionForNewNodeBestAreaFit(self, width, height, bestAreaFit, bestShortSideFit):
bestNode = Rectangle()
self.bestAreaFit = sys.maxint #int.MAX_VALUE;
leftoverHoriz = 0
leftoverVert = 0
shortSideFit = 0
areaFit = 0
for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
areaFit = rect.width * rect.height - width * height
if rect.width >= width and rect.height >= height:
leftoverHoriz = math.fabs(rect.width - width)
leftoverVert = math.fabs(rect.height - height)
shortSideFit = min(leftoverHoriz, leftoverVert)
if areaFit < self.bestAreaFit or (areaFit == self.bestAreaFit and shortSideFit < self.bestShortSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = width
bestNode.height = height
self.bestShortSideFit = shortSideFit
self.bestAreaFit = areaFit
if self.allowRotations and rect.width >= height and rect.height >= width:
leftoverHoriz = math.fabs(rect.width - height)
leftoverVert = math.fabs(rect.height - width)
shortSideFit = min(leftoverHoriz, leftoverVert)
				if areaFit < self.bestAreaFit or (areaFit == self.bestAreaFit and shortSideFit < self.bestShortSideFit):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = height
bestNode.height = width
self.bestShortSideFit = shortSideFit
self.bestAreaFit = areaFit
return bestNode
def commonIntervalLength(self, i1start, i1end, i2start, i2end):
if i1end < i2start or i2end < i1start:
return 0
return min(i1end, i2end) - max(i1start, i2start)
def contactPointScoreNode(self, x, y, width, height):
score = 0
if (x == 0 or x + width == self.binWidth):
score += height
if (y == 0 or y + height == self.binHeight):
score += width
for rect in self.usedRectangles: #(var i:int = 0; i < usedRectangles.length; i++) {
if (rect.x == x + width or rect.x + rect.width == x):
score = score + self.commonIntervalLength(rect.y, rect.y + rect.height, y, y + height)
if (rect.y == y + height or rect.y + rect.height == y):
score = score + self.commonIntervalLength(rect.x, rect.x + rect.width, x, x + width)
return score
def findPositionForNewNodeContactPoint(self, width, height, bestContactScore):
bestNode = Rectangle()
bestContactScore = -1
score = 0
for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
if (rect.width >= width and rect.height >= height):
score = self.contactPointScoreNode(rect.x, rect.y, width, height)
if (score > bestContactScore):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = width
bestNode.height = height
bestContactScore = score
if (self.allowRotations and rect.width >= height and rect.height >= width):
score = self.contactPointScoreNode(rect.x, rect.y, height, width)
if (score > bestContactScore):
bestNode.x = rect.x
bestNode.y = rect.y
bestNode.width = height
bestNode.height = width
bestContactScore = score
return bestNode
def splitFreeNode(self, freeNode, usedNode):
if (usedNode.x >= freeNode.x + freeNode.width or usedNode.x + usedNode.width <= freeNode.x or
usedNode.y >= freeNode.y + freeNode.height or usedNode.y + usedNode.height <= freeNode.y):
return False
newNode = None
if (usedNode.x < freeNode.x + freeNode.width and usedNode.x + usedNode.width > freeNode.x):
if (usedNode.y > freeNode.y and usedNode.y < freeNode.y + freeNode.height):
newNode = freeNode.clone()
newNode.height = usedNode.y - newNode.y
self.freeRectangles.append(newNode)
if (usedNode.y + usedNode.height < freeNode.y + freeNode.height):
newNode = freeNode.clone()
newNode.y = usedNode.y + usedNode.height
newNode.height = freeNode.y + freeNode.height - (usedNode.y + usedNode.height)
self.freeRectangles.append(newNode)
if (usedNode.y < freeNode.y + freeNode.height and usedNode.y + usedNode.height > freeNode.y):
if (usedNode.x > freeNode.x and usedNode.x < freeNode.x + freeNode.width):
newNode = freeNode.clone()
newNode.width = usedNode.x - newNode.x
self.freeRectangles.append(newNode)
if (usedNode.x + usedNode.width < freeNode.x + freeNode.width):
newNode = freeNode.clone()
newNode.x = usedNode.x + usedNode.width
newNode.width = freeNode.x + freeNode.width - (usedNode.x + usedNode.width)
self.freeRectangles.append(newNode)
return True
	def pruneFreeList(self):
		i = 0
		while i < len(self.freeRectangles):
			j = i + 1
			removed_i = False
			while j < len(self.freeRectangles):
				if self.isContainedIn(self.freeRectangles[i], self.freeRectangles[j]):
					# rectangle i is redundant; deleting it shifts the list,
					# so re-check the element that moved into index i
					del self.freeRectangles[i]
					removed_i = True
					break
				if self.isContainedIn(self.freeRectangles[j], self.freeRectangles[i]):
					# rectangle j is redundant; do not advance j, the next
					# element has shifted into this slot
					del self.freeRectangles[j]
				else:
					j = j + 1
			if not removed_i:
				i = i + 1
def isContainedIn(self, a, b):
return a.x >= b.x and a.y >= b.y and\
a.x+a.width <= b.x+b.width and\
a.y+a.height <= b.y+b.height
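# Minimal usage sketch (hypothetical sizes): pack one rectangle into a
# 128x128 bin.
#
#   pack = MaxRectsBinPack(128, 128, False)
#   r = Rectangle()
#   r.width, r.height = 40, 30
#   placed = pack.insert(r, FreeRectangleChoiceHeuristic.BestLongSideFit)
#   # placed.x / placed.y hold the position; placed.height == 0 means no fit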
class Demo:
	# source image directory
	res_path = "E:/Temp/abc"
	# output directory for the generated atlas sheets
	output_path = "E:/Temp"
total_arena = 0
MAX_SIZE = 1024
MIN_SIZE = 128
BASE_ALPHA = 15
width = 128
height = 128
count = 0
def __init__(self):
pass
def get_output_name(self):
name = 'sheet' + str(self.count) + '.png'
jsonname = 'sheet' + str(self.count) + '.json'
self.count = self.count + 1
return name, jsonname
def proc(self):
files = dircache.listdir(self.res_path)
self.maxRect = MaxRectsBinPack(self.width, self.height, False)
rects = []
self.maps = {}
for f in files:
p = self.res_path + '/' + f
img = Image.open(p)
img_width, img_height = img.size
minx, maxx, miny, maxy = self.get_edge(img)
rw = maxx - minx
rh = maxy - miny
img.close()
self.total_arena = self.total_arena + img_width * img_height
rect = Rectangle()
rect.name = f
rect.origin_width = img_width
rect.origin_height = img_height
rect.offX = minx
rect.offY = miny
rect.width = rw
rect.height = rh
rect.arena = rw * rh
			if rw > 450 or rh > 450: # over-sized images are not packed into a sheet
				continue
rects.append(rect)
self.maps[f] = p
rects = sorted(rects, key=lambda s:s.arena)
while True:
rst = self.proc_rects(rects)
			if rst: # all rects have been placed
				break
			if self.width == self.height and self.width == self.MAX_SIZE:
				print "next sheet"
				self.output()
				self.width = self.MIN_SIZE
				self.height = self.MIN_SIZE
				# start a fresh bin for the next sheet, otherwise the rects of
				# the sheet just written would be packed again
				self.maxRect = MaxRectsBinPack(self.width, self.height, False)
				continue
if self.width == self.height:
self.get_next_width()
self.maxRect = MaxRectsBinPack(self.width, self.height, False)
continue
else:
self.get_next_height()
self.maxRect = MaxRectsBinPack(self.width, self.height, False)
continue
self.output()
def output(self):
oi = Image.new("RGBA", (self.width, self.height), 0)
print self.width, self.height
od = {}
od['frames'] = {}
for r in self.maxRect.usedRectangles:
i = Image.open(self.maps[r.name])
			# PIL crop takes (left, upper, right, lower), not (x, y, w, h)
			crop = i.crop((r.offX, r.offY, r.offX + r.width, r.offY + r.height))
oi.paste(crop, (r.x, r.y))
i.close()
od['frames'][r.name.replace('.', '_')] = r.to_dict()
oimg_name, ojson_name = self.get_output_name()
oi.save(self.output_path + "/" + oimg_name)
od["file"] = oimg_name
jsonstr = json.dumps(od, indent=2, encoding="utf-8")
fd = open(self.output_path + "/" + ojson_name, 'wb')
fd.write(jsonstr);
fd.close();
	def proc_rects(self, rects):
		dels = []
		for rect in rects:
			rst = self.maxRect.insert(rect, FreeRectangleChoiceHeuristic.BestLongSideFit)
			if rst.height == 0:
				if self.width == self.height == self.MAX_SIZE:
					# the sheet is full at the maximum size: drop only the
					# rects already placed, so the one that failed is retried
					# on the next sheet
					for d in dels:
						rects.remove(d)
				return False
			dels.append(rect)
		return True
def get_next_width(self):
self.width = self.width * 2
if self.width > self.MAX_SIZE:
self.width = self.MAX_SIZE
def get_next_height(self):
self.height = self.height * 2
if self.height > self.MAX_SIZE:
self.height = self.MAX_SIZE
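	# Growth order used by proc() when a sheet overflows (one side doubles at
	# a time): 128x128 -> 256x128 -> 256x256 -> ... -> 1024x512 -> 1024x1024.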
def get_edge(self, img):
alpha = img.load()
w, h = img.size
minx = 0
maxx = w
miny = 0
maxy = h
x = 0
find = False
while x < w:
y = 0
while y < h:
p = alpha[x, y]
if len(p) <= 3:
p = (p[0], p[1], p[2], 255)
if p[3] > self.BASE_ALPHA:
minx = x
find = True
break
y = y + 1
if find:
break
x = x + 1
find = False
x = w - 1
while x >= 0:
y = 0
while y < h:
p = alpha[x, y]
if len(p) <= 3:
p = (p[0], p[1], p[2], 255)
if p[3] > self.BASE_ALPHA:
maxx = x
find = True
break
y = y + 1
if find:
break
x = x - 1
find = False
y = 0
while y < h:
x = 0
while x < w:
p = alpha[x, y]
if len(p) <= 3:
p = (p[0], p[1], p[2], 255)
if p[3] > self.BASE_ALPHA:
miny = y
find = True
break
x = x + 1
if find:
break
y = y + 1
find = False
y = h - 1
while y >= 0:
x = 0
while x < w:
p = alpha[x, y]
if len(p) <= 3:
p = (p[0], p[1], p[2], 255)
if p[3] > self.BASE_ALPHA:
maxy = y
find = True
break
x = x + 1
if find:
break
y = y - 1
return minx, maxx, miny, maxy
def begin(self):
files = dircache.listdir(self.res_path)
maxRect = MaxRectsBinPack(512, 256, False)
rects = []
maps = {}
for f in files:
p = self.res_path + '/' + f
img = Image.open(p)
img_width, img_height = img.size
self.total_arena = self.total_arena + img_width * img_height
rect = Rectangle()
rect.name = f
rect.width = img_width
rect.height = img_height
rects.append(rect)
maps[f] = img
maxRect.insert2(rects, [], FreeRectangleChoiceHeuristic.BestLongSideFit)
oi = Image.new("RGBA", (512, 256), 0)
for r in maxRect.usedRectangles:
print str(r.x) + "_" + str(r.y) + "_" + str(r.width) + "_" + str(r.height)
i = maps[r.name]
crop = i.crop((0, 0, r.width, r.height))
oi.paste(crop, (r.x, r.y))
#oi.show()
oi.save(self.output_path + "/test.png")
print self.total_arena
if __name__ == "__main__":
d = Demo()
d.proc()
print "success" | gpl-2.0 | -6,912,823,985,310,934,000 | 32.880484 | 150 | 0.615299 | false | 3.047632 | false | false | false |
jasonleaster/LeetCode | Add_Binary/addBinary.py | 1 | 1213 | class Solution(object):
def addBinary(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
x = [int(a[i]) for i in xrange(len(a))]
y = [int(b[i]) for i in xrange(len(b))]
x = x[::-1]
y = y[::-1]
if len(x) > len(y):
while len(x) > len(y):
y.append(0)
else:
while len(x) < len(y):
x.append(0)
        # extra slot for a possible final carry
        x.append(0)
        y.append(0)
        # add digit-wise, then normalize the carries in a second pass
        for i in xrange(len(x)):
            x[i] += y[i]
        for i in xrange(len(x)):
            if x[i] >= 2:
                tmp = x[i]
                x[i] = tmp % 2
                x[i + 1] += tmp / 2
string = ""
find = False
for i in range(len(x)-1, -1, -1):
if find == False:
if x[i] != 0:
find = True
string += str(x[i])
else:
string += str(x[i])
if len(string) == 0:
return "0"
else:
return string
# ------- test ---------
s = Solution()
print s.addBinary("0", "0")
print s.addBinary("11", "1111")
print s.addBinary("11", "11")
| gpl-2.0 | -8,802,240,305,176,711,000 | 21.886792 | 47 | 0.354493 | false | 3.416901 | false | false | false |
hivetech/hivy | setup.py | 1 | 1729 | import sys
import os
from setuptools import setup, find_packages
from hivy import __project__, __version__, __author__, __licence__
if sys.argv[-1] == 'test':
status = os.system('make tests')
sys.exit(1 if status > 127 else status)
# Needs also : apt-get install swig
requires = [
'Flask-RESTful>=0.2.11',
'docopt>=0.6.1',
'itsdangerous>=0.23',
'pytz>=2013.9',
'salt>=0.17.5',
'sh>=1.09',
'Logbook>=0.6.0',
'structlog>=0.4.1',
'docker-py>=0.2.3']
def long_description():
try:
#with codecs.open(readme, encoding='utf8') as f:
with open('readme.md') as f:
return f.read()
except IOError:
return "failed to read README.md"
setup(
name=__project__,
version=__version__,
    description='This plugin provides a RESTful interface to unide',
author=__author__,
author_email='[email protected]',
packages=find_packages(),
long_description=long_description(),
license=__licence__,
install_requires=requires,
url="https://github.com/hivetech/hivy",
entry_points={
'console_scripts': [
'hivy = hivy.__main__:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Topic :: Software Development',
'Topic :: System :: Shells',
],
scripts=['scripts/hivy-watchdog'],
data_files=[(os.path.expanduser('~/.hivy'), ['./Procfile'])]
)
| apache-2.0 | 2,771,885,732,673,024,500 | 26.444444 | 68 | 0.585888 | false | 3.609603 | false | false | false |
Anthony25/python_tc_qos | pyqos/app.py | 1 | 5166 | #!/usr/bin/env python3
# Author: Anthony Ruhier
import argparse
import logging
import os
import subprocess
import sys
from pyqos.backend import tc
from pyqos.config import Config, ConfigAttribute
global_logger = logging.getLogger("pyqos")
_logger = logging.getLogger(__name__)
class PyQoS():
"""
    Application to simplify the initialization of QoS rules. Inspired by
    the Flask project.
Usually you create a :class:`PyQoS` instance in your main module or
in the :file:`__init__.py` file of your package like this::
from pyqos import PyQoS
app = PyQoS(application_name)
"""
#: set the main logger in debug mode or not
debug = ConfigAttribute("DEBUG")
#: dryrun
dryrun = ConfigAttribute("DRYRUN")
#: name of the main logger
logger_name = ConfigAttribute('LOGGER_NAME')
#: configuration default values
default_config = {
"DEBUG": False,
"DRYRUN": False,
"LOGGER_NAME": None,
"INTERFACES": dict(),
}
#: list of qos object to apply at run
run_list = list()
def __init__(self, app_name="pyqos", root_path=None):
self.app_name = app_name
self.config = Config(root_path, self.default_config)
self._logger = None
self.logger_name = self.app_name
@property
def logger(self):
"""
A :class:`logging.Logger` object for this application. The
default configuration is to log to stderr if the application is
in debug mode. This logger can be used to (surprise) log messages.
Here some examples::
app.logger.debug('A value for debugging')
app.logger.warning('A warning occurred (%d apples)', 42)
app.logger.error('An error occurred')
"""
if not (self._logger and self._logger.name == self.logger_name):
self._logger = logging.Logger(self.logger_name)
if self.config["DEBUG"]:
self._logger.setLevel(logging.DEBUG)
else:
self._logger.setLevel(logging.WARNING)
return self._logger
def get_ifnames(self, interfaces_lst=None):
if interfaces_lst is None:
interfaces_lst = self.config["INTERFACES"]
if_names = set()
for interface in interfaces_lst.values():
if "name" in interface.keys():
if_names.add(interface["name"])
else:
if_names.update(self.get_ifnames(interfaces_lst=interface))
return if_names
def run_as_root(self):
"""
Restart the script as root
"""
if os.geteuid() != 0:
print("You need to be root to run this script. Relaunching with "
"sudo...\n")
subprocess.call(["sudo", sys.executable] + sys.argv)
exit()
def apply_qos(self):
self.run_as_root()
# Clean old rules
self.reset_qos()
# Setting new rules
print("Setting new rules")
for r in self.run_list:
r.apply(dryrun=self.config.get("DRYRUN", False))
def reset_qos(self):
"""
Reset QoS for all configured interfaces
"""
self.run_as_root()
print("Removing tc rules")
ifnames = self.get_ifnames()
tc.qdisc_del(ifnames, stderr=subprocess.DEVNULL)
def show_qos(self):
ifnames = self.get_ifnames()
print("\n\t QDiscs details\n\t================\n")
tc.qdisc_show(ifnames, "details")
print("\n\t QDiscs stats\n\t==============\n")
tc.qdisc_show(ifnames, "details")
def init_parser(self):
"""
Init argparse objects
"""
parser = argparse.ArgumentParser(
description="Tool to set, show or delete QoS rules on Linux"
)
# Start/Stop/Show command
sp_action = parser.add_subparsers()
sp_start = sp_action.add_parser("start", help="set QoS rules")
sp_stop = sp_action.add_parser("stop", help="remove all QoS rules")
sp_show = sp_action.add_parser("show", help="show QoS rules")
# Set function to call for each options
sp_start.set_defaults(func=self.apply_qos)
sp_stop.set_defaults(func=self.reset_qos)
sp_show.set_defaults(func=self.show_qos)
# Debug option
parser.add_argument('-d', '--debug', help="set the debug level",
dest="debug", action="store_true")
parser.add_argument('-D', '--dryrun', help="dry run",
dest="dryrun", action="store_true")
self.arg_parser = parser
def run(self):
self.init_parser()
# If no argument provided show help
if len(sys.argv) == 1:
self.arg_parser.print_help()
sys.exit(1)
# Parse argument
args = self.arg_parser.parse_args()
self.dryrun = args.dryrun
if args.debug or args.dryrun:
self.debug = True
# Execute correct function, or print usage
if hasattr(args, "func"):
args.func()
else:
self.arg_parser.print_help()
sys.exit(1)
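# Usage sketch grounded in this module (the rule object is a hypothetical
# pyqos QoS root exposing .apply(dryrun=...)):
#
#   app = PyQoS("myqos")
#   app.config["INTERFACES"] = {"lan": {"name": "eth0"}}
#   app.run_list.append(root_qdisc)
#   app.run()  # argv decides: start / stop / show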
| bsd-2-clause | 1,324,399,131,611,814,000 | 30.888889 | 78 | 0.577042 | false | 3.910674 | true | false | false |
andrecunha/coh-metrix-dementia | coh/tools/tag/api.py | 1 | 4362 | # -*- coding: utf-8 -*-
# Coh-Metrix-Dementia - Automatic text analysis and classification for dementia.
# Copyright (C) 2014 Andre Luiz Verucci da Cunha
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals, print_function, division
from functools import partial
class Tagger(object):
"""Represents an interface for classes that perform part-of-speech tagging.
There are two basic methods:
    tag: takes as input a list of tokens and returns a list of tuples
    (string, string), containing the token and its PoS tag.
    tag_sents: takes as input a list of tokenized sentences and analyzes
    them all at once.
    Derived classes must override at least one of these methods. This class is
based on nltk.tag.api.TaggerI
(see http://www.nltk.org/api/nltk.tag.html#nltk.tag.api.TaggerI).
"""
def tag(self, tokens):
"""Assign a part-of-speech tag to a tokenized sentence.
Required parameters:
tokens -- a list of strings, containing the tokens to be analyzed.
Returns:
A list of pairs (string, string), where the first string is the token
and the second one is the corresponding PoS tag.
"""
tagged_sents = self.tag_sents([tokens])
return tagged_sents[0] if tagged_sents else []
def tag_sents(self, sentences):
"""Assign part-of-speech tags to multiple sentences at once.
Required parameters:
sentences -- A list of lists of strings, containing the tokens to
be analyzed, separated by sentences.
Returns:
A list of lists of pairs (string, string), one list of each sentence.
"""
return [self.tag(sent) for sent in sentences]
class TagSet(object):
    """Represents a set of tags used by a tagger. This class is intended to
    facilitate the use of multiple taggers with different tagsets.
Subclasses must, at least, define the *_tags lists.
"""
article_tags = []
verb_tags = []
auxiliary_verb_tags = []
participle_tags = []
noun_tags = []
adjective_tags = []
adverb_tags = []
pronoun_tags = []
numeral_tags = []
conjunction_tags = []
preposition_tags = []
interjection_tags = []
currency_tags = []
content_word_tags = []
function_word_tags = []
functions_as_noun_tags = []
functions_as_adjective_tags = []
punctuation_tags = []
fine_to_coarse = {}
def __init__(self):
"""Form a TagSet.
This function will look at the attributes ending with '_tags' and
generate proper helping methods, that return True if the given tag
is in the list, and False otherwise. If an attribute is of the form
'functions_as_foo_tags', __init__ will generate a method called
'functions_as_foo(tag)'; otherwise, if it's of the form 'foo_tags',
it will generate a method called 'is_foo(tag)'.
"""
n = len('_tags')
def is_in(lst, token):
return token[1] in lst
for attr in dir(self):
if attr.endswith('_tags'):
if attr.startswith('functions_as'):
attr_name = attr[:-n]
else:
attr_name = 'is_' + attr[:-n]
lst = getattr(self, attr)
setattr(self, attr_name, partial(is_in, lst))
def get_coarse_tag(self, tag):
"""Get the coarse tag corresponding to a fine tag.
:tag: the fine tag.
:returns: the corresponding coarse tag, or the tag itself if there is
no corresponding coarse tag in the mapping.
"""
return self.fine_to_coarse[tag] if tag in self.fine_to_coarse else tag
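# Illustrative subclass (toy tags, not a real tagset): defining the *_tags
# lists is enough for TagSet.__init__ to generate the matching predicates.
#
#   class ToyTagSet(TagSet):
#       noun_tags = ['N']
#       functions_as_noun_tags = ['N', 'PRON']
#
#   tagset = ToyTagSet()
#   tagset.is_noun(('dog', 'N'))              # True
#   tagset.functions_as_noun(('he', 'PRON'))  # True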
| gpl-3.0 | 136,833,219,321,596,450 | 33.346457 | 80 | 0.632967 | false | 4.042632 | false | false | false |
piyushc79/RedisClusterCache | setup.py | 1 | 1472 | from distutils.core import setup
setup(
name = 'dj_rcc',
version = '1.0.0',
packages = ['dj_rcc'],
description='Redis Cluster Cache library for redis 3.0.0 built on top of redis-py-cluster',
author = 'Piyush Chourasiya',
author_email = '[email protected]',
maintainer='Piyush Chourasiya',
maintainer_email='[email protected]',
url = 'https://github.com/piyushc79/RedisClusterCache/dj_rcc/',
download_url = 'https://github.com/piyushc79/RedisClusterCache/dj_rcc/',
install_requires=[
'redis>=2.10.2',
'redis-py-cluster==1.0.0',
],
keywords=[
'redis',
'redis cluster',
],
classifiers=(
# As from https://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
# 'Development Status :: 4 - Beta',
'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Environment :: Web Environment',
'Operating System :: POSIX',
)
)
| gpl-3.0 | 3,698,967,182,028,895,700 | 35.8 | 93 | 0.603261 | false | 3.643564 | false | false | false |
build18-fpga-on-the-web/server | scratch/foo.py | 1 | 3828 | import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import os.path
import uuid
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
class MessageBuffer(object):
def __init__(self):
self.waiters = set()
self.cache = []
self.cache_size = 200
def wait_for_messages(self, cursor=None):
# Construct a Future to return to our caller. This allows
# wait_for_messages to be yielded from a coroutine even though
# it is not a coroutine itself. We will set the result of the
# Future when results are available.
result_future = Future()
if cursor:
new_count = 0
for msg in reversed(self.cache):
if msg["id"] == cursor:
break
new_count += 1
if new_count:
result_future.set_result(self.cache[-new_count:])
return result_future
self.waiters.add(result_future)
return result_future
def cancel_wait(self, future):
self.waiters.remove(future)
# Set an empty result to unblock any coroutines waiting.
future.set_result([])
def new_messages(self, messages):
logging.info("Sending new message to %r listeners", len(self.waiters))
for future in self.waiters:
future.set_result(messages)
self.waiters = set()
self.cache.extend(messages)
if len(self.cache) > self.cache_size:
self.cache = self.cache[-self.cache_size:]
# Making this a non-singleton is left as an exercise for the reader.
global_message_buffer = MessageBuffer()
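# Long-poll flow sketch (hypothetical ids): a client POSTs the cursor of the
# last message it has seen to /a/message/updates; wait_for_messages() parks
# the request in a Future that new_messages() later resolves, so the HTTP
# response stays open until something newer than the cursor arrives.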
class BaseHandler(tornado.web.RequestHandler):
def get_current_user(self):
return "Bob"
class MainHandler(BaseHandler):
def get(self):
self.render("index.html", messages=global_message_buffer.cache)
class MessageNewHandler(BaseHandler):
def post(self):
message = {
"id": str(uuid.uuid4()),
"from": self.current_user,
"body": self.get_argument("body"),
}
# to_basestring is necessary for Python 3's json encoder,
# which doesn't accept byte strings.
message["html"] = tornado.escape.to_basestring(
self.render_string("message.html", message=message))
if self.get_argument("next", None):
self.redirect(self.get_argument("next"))
else:
self.write(message)
global_message_buffer.new_messages([message])
class MessageUpdatesHandler(BaseHandler):
@gen.coroutine
def post(self):
cursor = self.get_argument("cursor", None)
# Save the future returned by wait_for_messages so we can cancel
# it in wait_for_messages
self.future = global_message_buffer.wait_for_messages(cursor=cursor)
messages = yield self.future
if self.request.connection.stream.closed():
return
self.write(dict(messages=messages))
def on_connection_close(self):
global_message_buffer.cancel_wait(self.future)
def main():
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/a/message/new", MessageNewHandler),
(r"/a/message/updates", MessageUpdatesHandler),
],
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
debug=options.debug,
)
app.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
| mit | 2,735,976,283,617,163,000 | 30.9 | 78 | 0.618861 | false | 4.016789 | false | false | false |
EFForg/privacybadgerchrome | tests/selenium/clobbering_test.py | 2 | 3915 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import unittest
import pbtest
class ClobberingTest(pbtest.PBSeleniumTest):
def test_localstorage_clobbering(self):
LOCALSTORAGE_TESTS = [
# (test result element ID, expected stored, expected empty)
('get-item', "qwerty", "null"),
('get-property', "asdf", "undefined"),
('get-item-proto', "qwerty", "null"),
('get-item-srcdoc', "qwerty", "null"),
('get-property-srcdoc', "asdf", "undefined"),
('get-item-frames', "qwerty", "null"),
('get-property-frames', "asdf", "undefined"),
]
# page loads a frame that writes to and reads from localStorage
# TODO remove delays from fixture once race condition (https://crbug.com/478183) is fixed
FIXTURE_URL = "https://privacybadger-tests.eff.org/html/clobbering.html"
FRAME_DOMAIN = "efforg.github.io"
# first allow localStorage to be set
self.load_url(FIXTURE_URL)
self.wait_for_and_switch_to_frame('iframe')
for selector, expected, _ in LOCALSTORAGE_TESTS:
# wait for each test to run
self.wait_for_script(
"return document.getElementById('%s')"
".textContent != '...';" % selector,
timeout=2,
message=(
"Timed out waiting for localStorage (%s) to finish ... "
"This probably means the fixture "
"errored out somewhere." % selector
)
)
self.assertEqual(
self.txt_by_css("#" + selector), expected,
"localStorage (%s) was not read successfully"
"for some reason" % selector
)
# mark the frame domain for cookieblocking
self.cookieblock_domain(FRAME_DOMAIN)
# now rerun and check results for various localStorage access tests
self.load_url(FIXTURE_URL)
self.wait_for_and_switch_to_frame('iframe')
for selector, _, expected in LOCALSTORAGE_TESTS:
# wait for each test to run
self.wait_for_script(
"return document.getElementById('%s')"
".textContent != '...';" % selector,
timeout=2,
message=(
"Timed out waiting for localStorage (%s) to finish ... "
"This probably means the fixture "
"errored out somewhere." % selector
)
)
self.assertEqual(
self.txt_by_css("#" + selector), expected,
"localStorage (%s) was read despite cookieblocking" % selector
)
def test_referrer_header(self):
FIXTURE_URL = (
"https://efforg.github.io/privacybadger-test-fixtures/html/"
"referrer.html"
)
THIRD_PARTY_DOMAIN = "httpbin.org"
def verify_referrer_header(expected, failure_message):
self.load_url(FIXTURE_URL)
self.wait_for_script(
"return document.getElementById('referrer').textContent != '';")
referrer = self.txt_by_css("#referrer")
self.assertEqual(referrer[0:8], "Referer=", "Unexpected page output")
self.assertEqual(referrer[8:], expected, failure_message)
# verify base case
verify_referrer_header(
FIXTURE_URL,
"Unexpected default referrer header"
)
# cookieblock the domain fetched by the fixture
self.cookieblock_domain(THIRD_PARTY_DOMAIN)
# recheck what the referrer header looks like now after cookieblocking
verify_referrer_header(
"https://efforg.github.io/",
"Referrer header does not appear to be origin-only"
)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | 8,279,231,285,994,046,000 | 37.382353 | 97 | 0.551724 | false | 4.232432 | true | false | false |
ric2b/Vivaldi-browser | chromium/gpu/command_buffer/build_cmd_buffer_lib.py | 2 | 264144 | # Copyright (c) 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common code generator for command buffers."""
import errno
import itertools
import os
import os.path
import re
import platform
from subprocess import call
_SIZE_OF_UINT32 = 4
_SIZE_OF_COMMAND_HEADER = 4
_FIRST_SPECIFIC_COMMAND_ID = 256
_LICENSE = """// Copyright %s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"""
_DO_NOT_EDIT_WARNING = """// This file is auto-generated from
// gpu/command_buffer/build_%s_cmd_buffer.py
// It's formatted by clang-format using chromium coding style:
// clang-format -i -style=chromium filename
// DO NOT EDIT!
"""
# This string is copied directly out of the gl2.h file from GLES2.0
#
# Edits:
#
# *) Any argument that is a resourceID has been changed to GLid<Type>.
# (not pointer arguments) and if it's allowed to be zero it's GLidZero<Type>
# If it's allowed to not exist it's GLidBind<Type>
#
# *) All GLenums have been changed to GLenumTypeOfEnum
#
_GL_TYPES = {
'GLenum': 'unsigned int',
'GLboolean': 'unsigned char',
'GLbitfield': 'unsigned int',
'GLbyte': 'signed char',
'GLshort': 'short',
'GLint': 'int',
'GLsizei': 'int',
'GLubyte': 'unsigned char',
'GLushort': 'unsigned short',
'GLuint': 'unsigned int',
'GLfloat': 'float',
'GLclampf': 'float',
'GLvoid': 'void',
'GLfixed': 'int',
'GLclampx': 'int'
}
_GL_TYPES_32 = {
'GLintptr': 'long int',
'GLsizeiptr': 'long int'
}
_GL_TYPES_64 = {
'GLintptr': 'long long int',
'GLsizeiptr': 'long long int'
}
_ETC_COMPRESSED_TEXTURE_FORMATS = [
'GL_COMPRESSED_R11_EAC',
'GL_COMPRESSED_SIGNED_R11_EAC',
'GL_COMPRESSED_RG11_EAC',
'GL_COMPRESSED_SIGNED_RG11_EAC',
'GL_COMPRESSED_RGB8_ETC2',
'GL_COMPRESSED_SRGB8_ETC2',
'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_RGBA8_ETC2_EAC',
'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
]
# This table specifies the different pepper interfaces that are supported for
# GL commands. 'dev' is true if it's a dev interface.
_PEPPER_INTERFACES = [
{'name': '', 'dev': False},
{'name': 'InstancedArrays', 'dev': False},
{'name': 'FramebufferBlit', 'dev': False},
{'name': 'FramebufferMultisample', 'dev': False},
{'name': 'ChromiumEnableFeature', 'dev': False},
{'name': 'ChromiumMapSub', 'dev': False},
{'name': 'Query', 'dev': False},
{'name': 'VertexArrayObject', 'dev': False},
{'name': 'DrawBuffers', 'dev': True},
]
# Capabilities selected with glEnable
# on_change: string of C++ code that is executed when the state is changed.
_CAPABILITY_FLAGS = [
{'name': 'blend'},
{'name': 'cull_face'},
{'name': 'depth_test',
'on_change': 'framebuffer_state_.clear_state_dirty = true;'},
{'name': 'dither', 'default': True},
{'name': 'framebuffer_srgb_ext', 'default': True, 'no_init': True,
'extension_flag': 'ext_srgb_write_control'},
{'name': 'polygon_offset_fill'},
{'name': 'sample_alpha_to_coverage'},
{'name': 'sample_coverage'},
{'name': 'scissor_test'},
{'name': 'stencil_test',
'on_change': '''state_.stencil_state_changed_since_validation = true;
framebuffer_state_.clear_state_dirty = true;'''},
{'name': 'rasterizer_discard', 'es3': True},
{'name': 'primitive_restart_fixed_index', 'es3': True},
{'name': 'multisample_ext', 'default': True,
'extension_flag': 'ext_multisample_compatibility'},
{'name': 'sample_alpha_to_one_ext',
'extension_flag': 'ext_multisample_compatibility'},
]
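# Illustrative only (not used by the generator): each entry above drives code
# generation later in this file; e.g. the capability names and the subset
# that defaults to enabled can be read straight off the table.
#
#   _CAP_NAMES = [cap['name'] for cap in _CAPABILITY_FLAGS]
#   _DEFAULT_ON = [cap['name'] for cap in _CAPABILITY_FLAGS if cap.get('default')]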
_STATE_INFO = {
'ClearColor': {
'type': 'Normal',
'func': 'ClearColor',
'enum': 'GL_COLOR_CLEAR_VALUE',
'states': [
{'name': 'color_clear_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'ClearDepthf': {
'type': 'Normal',
'func': 'ClearDepth',
'enum': 'GL_DEPTH_CLEAR_VALUE',
'states': [
{'name': 'depth_clear', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'ColorMask': {
'type': 'Normal',
'func': 'ColorMask',
'enum': 'GL_COLOR_WRITEMASK',
'states': [
{
'name': 'color_mask_red',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_green',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_blue',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_alpha',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'on_change': 'framebuffer_state_.clear_state_dirty = true;',
},
'ClearStencil': {
'type': 'Normal',
'func': 'ClearStencil',
'enum': 'GL_STENCIL_CLEAR_VALUE',
'states': [
{'name': 'stencil_clear', 'type': 'GLint', 'default': '0'},
],
},
'CoverageModulationCHROMIUM': {
'type': 'Normal',
'func': 'CoverageModulationNV',
'extension_flag': 'chromium_framebuffer_mixed_samples',
'states': [
{ 'enum': 'GL_COVERAGE_MODULATION_CHROMIUM',
'name': 'coverage_modulation',
'type': 'GLenum',
'default': 'GL_NONE',
},
]
},
'BlendColor': {
'type': 'Normal',
'func': 'BlendColor',
'enum': 'GL_BLEND_COLOR',
'states': [
{'name': 'blend_color_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'BlendEquation': {
'type': 'SrcDst',
'func': 'BlendEquationSeparate',
'states': [
{
'name': 'blend_equation_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_RGB',
'default': 'GL_FUNC_ADD',
},
{
'name': 'blend_equation_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_ALPHA',
'default': 'GL_FUNC_ADD',
},
],
},
'BlendFunc': {
'type': 'SrcDst',
'func': 'BlendFuncSeparate',
'states': [
{
'name': 'blend_source_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_RGB',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_RGB',
'default': 'GL_ZERO',
},
{
'name': 'blend_source_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_ALPHA',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_ALPHA',
'default': 'GL_ZERO',
},
],
},
'PolygonOffset': {
'type': 'Normal',
'func': 'PolygonOffset',
'states': [
{
'name': 'polygon_offset_factor',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_FACTOR',
'default': '0.0f',
},
{
'name': 'polygon_offset_units',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_UNITS',
'default': '0.0f',
},
],
},
'CullFace': {
'type': 'Normal',
'func': 'CullFace',
'enum': 'GL_CULL_FACE_MODE',
'states': [
{
'name': 'cull_mode',
'type': 'GLenum',
'default': 'GL_BACK',
},
],
},
'FrontFace': {
'type': 'Normal',
'func': 'FrontFace',
'enum': 'GL_FRONT_FACE',
'states': [{'name': 'front_face', 'type': 'GLenum', 'default': 'GL_CCW'}],
},
'DepthFunc': {
'type': 'Normal',
'func': 'DepthFunc',
'enum': 'GL_DEPTH_FUNC',
'states': [{'name': 'depth_func', 'type': 'GLenum', 'default': 'GL_LESS'}],
},
'DepthRange': {
'type': 'Normal',
'func': 'DepthRange',
'enum': 'GL_DEPTH_RANGE',
'states': [
{'name': 'z_near', 'type': 'GLclampf', 'default': '0.0f'},
{'name': 'z_far', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'SampleCoverage': {
'type': 'Normal',
'func': 'SampleCoverage',
'states': [
{
'name': 'sample_coverage_value',
'type': 'GLclampf',
'enum': 'GL_SAMPLE_COVERAGE_VALUE',
'default': '1.0f',
},
{
'name': 'sample_coverage_invert',
'type': 'GLboolean',
'enum': 'GL_SAMPLE_COVERAGE_INVERT',
'default': 'false',
},
],
},
'StencilMask': {
'type': 'FrontBack',
'func': 'StencilMaskSeparate',
'states': [
{
'name': 'stencil_front_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
{
'name': 'stencil_back_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
],
'on_change': '''framebuffer_state_.clear_state_dirty = true;
state_.stencil_state_changed_since_validation = true;''',
},
'StencilOp': {
'type': 'FrontBack',
'func': 'StencilOpSeparate',
'states': [
{
'name': 'stencil_front_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
],
},
'StencilFunc': {
'type': 'FrontBack',
'func': 'StencilFuncSeparate',
'states': [
{
'name': 'stencil_front_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_front_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_REF',
'default': '0',
},
{
'name': 'stencil_front_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_back_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_BACK_REF',
'default': '0',
},
{
'name': 'stencil_back_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
],
'on_change': 'state_.stencil_state_changed_since_validation = true;',
},
'Hint': {
'type': 'NamedParameter',
'func': 'Hint',
'states': [
{
'name': 'hint_generate_mipmap',
'type': 'GLenum',
'enum': 'GL_GENERATE_MIPMAP_HINT',
'default': 'GL_DONT_CARE',
'gl_version_flag': '!is_desktop_core_profile'
},
{
'name': 'hint_fragment_shader_derivative',
'type': 'GLenum',
'enum': 'GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES',
'default': 'GL_DONT_CARE',
'extension_flag': 'oes_standard_derivatives'
},
{
'name': 'hint_texture_filtering',
'type': 'GLenum',
'enum': 'GL_TEXTURE_FILTERING_HINT_CHROMIUM',
'default': 'GL_NICEST',
'extension_flag': 'chromium_texture_filtering_hint'
}
],
},
'PixelStore': {
'type': 'NamedParameter',
'func': 'PixelStorei',
'states': [
{
'name': 'pack_alignment',
'type': 'GLint',
'enum': 'GL_PACK_ALIGNMENT',
'default': '4'
},
{
'name': 'unpack_alignment',
'type': 'GLint',
'enum': 'GL_UNPACK_ALIGNMENT',
'default': '4'
},
{
'name': 'pack_row_length',
'type': 'GLint',
'enum': 'GL_PACK_ROW_LENGTH',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'pack_skip_pixels',
'type': 'GLint',
'enum': 'GL_PACK_SKIP_PIXELS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'pack_skip_rows',
'type': 'GLint',
'enum': 'GL_PACK_SKIP_ROWS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_row_length',
'type': 'GLint',
'enum': 'GL_UNPACK_ROW_LENGTH',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_image_height',
'type': 'GLint',
'enum': 'GL_UNPACK_IMAGE_HEIGHT',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_pixels',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_PIXELS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_rows',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_ROWS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_images',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_IMAGES',
'default': '0',
'es3': True,
'manual': True,
}
],
},
  # TODO: Consider implementing these states
# GL_ACTIVE_TEXTURE
'LineWidth': {
'type': 'Normal',
'custom_function' : True,
'func': 'DoLineWidth',
'enum': 'GL_LINE_WIDTH',
'states': [
{
'name': 'line_width',
'type': 'GLfloat',
'default': '1.0f',
'range_checks': [{'check': "<= 0.0f", 'test_value': "0.0f"}],
'nan_check': True,
}],
},
'DepthMask': {
'type': 'Normal',
'func': 'DepthMask',
'enum': 'GL_DEPTH_WRITEMASK',
'states': [
{
'name': 'depth_mask',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'on_change': 'framebuffer_state_.clear_state_dirty = true;',
},
'Scissor': {
'type': 'Normal',
'func': 'Scissor',
'enum': 'GL_SCISSOR_BOX',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'scissor_x',
'type': 'GLint',
'default': '0',
},
{
'name': 'scissor_y',
'type': 'GLint',
'default': '0',
},
{
'name': 'scissor_width',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.width()',
},
{
'name': 'scissor_height',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.height()',
},
],
},
'Viewport': {
'type': 'Normal',
'func': 'Viewport',
'enum': 'GL_VIEWPORT',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'viewport_x',
'type': 'GLint',
'default': '0',
},
{
'name': 'viewport_y',
'type': 'GLint',
'default': '0',
},
{
'name': 'viewport_width',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.width()',
},
{
'name': 'viewport_height',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.height()',
},
],
},
'MatrixValuesCHROMIUM': {
'type': 'NamedParameter',
'func': 'MatrixLoadfEXT',
'states': [
{ 'enum': 'GL_PATH_MODELVIEW_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_MODELVIEW_CHROMIUM',
'name': 'modelview_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
{ 'enum': 'GL_PATH_PROJECTION_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_PROJECTION_CHROMIUM',
'name': 'projection_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
],
},
'PathStencilFuncCHROMIUM': {
'type': 'Normal',
'func': 'PathStencilFuncNV',
'extension_flag': 'chromium_path_rendering',
'states': [
{
'name': 'stencil_path_func',
'type': 'GLenum',
'enum': 'GL_PATH_STENCIL_FUNC_CHROMIUM',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_path_ref',
'type': 'GLint',
'enum': 'GL_PATH_STENCIL_REF_CHROMIUM',
'default': '0',
},
{
'name': 'stencil_path_mask',
'type': 'GLuint',
'enum': 'GL_PATH_STENCIL_VALUE_MASK_CHROMIUM',
'default': '0xFFFFFFFFU',
},
],
},
'WindowRectanglesEXT': {
'type': 'Normal',
'func': 'WindowRectanglesEXT',
'custom_function': True,
'extension_flag': 'ext_window_rectangles',
'no_init': True,
'states': [
{
'name': 'window_rectangles_mode',
'type': 'GLenum',
'enum': 'GL_WINDOW_RECTANGLE_MODE_EXT',
'default': 'GL_EXCLUSIVE_EXT',
},
{
'name': 'num_window_rectangles',
'type': 'GLint',
'enum': 'GL_NUM_WINDOW_RECTANGLES_EXT',
'default': '0',
},
],
},
}
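# Illustrative sketch only, derived from StateSetHandler below: an entry such
# as 'ClearColor' drives generation of a service handler roughly of the form
#   if (state_.color_clear_red != red || state_.color_clear_green != green ||
#       state_.color_clear_blue != blue || state_.color_clear_alpha != alpha) {
#     state_.color_clear_red = red;
#     ...
#     glClearColor(red, green, blue, alpha);
#   }
# with any 'on_change' snippet emitted after the assignments and before the
# GL call.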
_prefix = None
_upper_prefix = None
_lower_prefix = None
def InitializePrefix(mixed_case_prefix):
"""Initialize prefix used for autogenerated code.
Must be called before autogenerating code. Prefixes are used by autogenerated
code in many places: class names, filenames, namespaces, constants,
defines. Given a single mixed case prefix suitable for a class name, we also
initialize lower and upper case prefixes for other uses (e.g. filenames and
#defines).
"""
global _prefix
if _prefix:
raise AssertionError
_prefix = mixed_case_prefix
global _upper_prefix
_upper_prefix = mixed_case_prefix.upper()
global _lower_prefix
_lower_prefix = mixed_case_prefix.lower()
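# Illustrative usage (hypothetical call site): InitializePrefix("GLES2")
# leaves _prefix == "GLES2", _upper_prefix == "GLES2" and
# _lower_prefix == "gles2"; calling it a second time raises AssertionError.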
def _Namespace():
if _lower_prefix != 'gles2':
return 'gles2::'
return ''
def Grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks"""
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
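# Illustrative example: list(Grouper(2, [1, 2, 3], fillvalue=0)) yields
# [(1, 2), (3, 0)]; the last chunk is padded with the fill value.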
def SplitWords(input_string):
"""Split by '_' if found, otherwise split at uppercase/numeric chars.
Will split "some_TEXT" into ["some", "TEXT"], "CamelCase" into ["Camel",
"Case"], and "Vector3" into ["Vector", "3"].
"""
if input_string.find('_') > -1:
# 'some_TEXT_' -> 'some TEXT'
return input_string.replace('_', ' ').strip().split()
else:
input_string = input_string.replace('::', ' ')
if re.search('[A-Z]', input_string) and re.search('[a-z]', input_string):
# mixed case.
# look for capitalization to cut input_strings
# 'SomeText' -> 'Some Text'
input_string = re.sub('([A-Z])', r' \1', input_string).strip()
# 'Vector3' -> 'Vector 3'
input_string = re.sub('([^0-9])([0-9])', r'\1 \2', input_string)
return input_string.split()
def ToUnderscore(input_string):
"""converts CamelCase to camel_case."""
words = SplitWords(input_string)
return '_'.join([word.lower() for word in words])
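# Illustrative examples: ToUnderscore('BindBuffer') == 'bind_buffer' and
# ToUnderscore('Vector3') == 'vector_3'.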
def ValidatorClassName(type_name):
"""Converts some::namespace::TypeName to SomeNamespaceTypeNameValidator."""
words = SplitWords(type_name)
prefix = ''.join([word.title() for word in words])
return '%sValidator' % prefix
def CachedStateName(item):
if item.get('cached', False):
return 'cached_' + item['name']
return item['name']
def GuardState(state, operation, feature_info):
if 'manual' in state:
assert state['manual']
return ""
result = []
result_end = []
if 'es3' in state:
assert state['es3']
result.append(" if (%s->IsES3Capable()) {\n" % feature_info);
result_end.append(" }\n")
if 'extension_flag' in state:
result.append(" if (%s->feature_flags().%s) {\n " %
(feature_info, state['extension_flag']))
result_end.append(" }\n")
if 'gl_version_flag' in state:
name = state['gl_version_flag']
inverted = ''
if name[0] == '!':
inverted = '!'
name = name[1:]
result.append(" if (%s%s->gl_version_info().%s) {\n" %
(inverted, feature_info, name))
result_end.append(" }\n")
result.append(operation)
return ''.join(result + result_end)
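# Illustrative sketch (hypothetical state and operation): for a state with
# 'extension_flag': 'chromium_path_rendering' and operation "  DoOp();\n",
# GuardState returns roughly
#   if (feature_info_->feature_flags().chromium_path_rendering) {
#     DoOp();
#   }
# i.e. the operation wrapped in whichever runtime guards the state declares.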
def ToGLExtensionString(extension_flag):
"""Returns GL-type extension string of a extension flag."""
if extension_flag == "oes_compressed_etc1_rgb8_texture":
return "OES_compressed_ETC1_RGB8_texture" # Fixup inconsitency with rgb8,
# unfortunate.
uppercase_words = [ 'img', 'ext', 'arb', 'chromium', 'oes', 'amd', 'bgra8888',
'egl', 'atc', 'etc1', 'angle']
parts = extension_flag.split('_')
return "_".join(
[part.upper() if part in uppercase_words else part for part in parts])
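# Illustrative example: ToGLExtensionString('chromium_framebuffer_multisample')
# returns 'CHROMIUM_framebuffer_multisample'; only the words listed in
# uppercase_words are capitalized.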
def ToCamelCase(input_string):
"""converts ABC_underscore_case to ABCUnderscoreCase."""
return ''.join(w[0].upper() + w[1:] for w in input_string.split('_'))
def EnumsConflict(a, b):
"""Returns true if the enums have different names (ignoring suffixes) and one
of them is a Chromium enum."""
if a == b:
return False
if b.endswith('_CHROMIUM'):
a, b = b, a
if not a.endswith('_CHROMIUM'):
return False
def removesuffix(string, suffix):
if not string.endswith(suffix):
return string
return string[:-len(suffix)]
b = removesuffix(b, "_NV")
b = removesuffix(b, "_EXT")
b = removesuffix(b, "_OES")
return removesuffix(a, "_CHROMIUM") != b
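# Illustrative examples (hypothetical enum names):
#   EnumsConflict('GL_FOO_CHROMIUM', 'GL_FOO_EXT') -> False (same base name)
#   EnumsConflict('GL_FOO_CHROMIUM', 'GL_BAR_EXT') -> True
#   EnumsConflict('GL_FOO_EXT', 'GL_BAR_NV') -> False (no Chromium enum)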
def GetGLGetTypeConversion(result_type, value_type, value):
"""Makes a gl compatible type conversion string for accessing state variables.
Useful when accessing state variables through glGetXXX calls.
  glGet documentation (for example, the manual pages):
[...] If glGetIntegerv is called, [...] most floating-point values are
rounded to the nearest integer value. [...]
Args:
result_type: the gl type to be obtained
value_type: the GL type of the state variable
value: the name of the state variable
Returns:
String that converts the state variable to desired GL type according to GL
rules.
"""
if result_type == 'GLint':
if value_type == 'GLfloat':
return 'static_cast<GLint>(round(%s))' % value
return 'static_cast<%s>(%s)' % (result_type, value)
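# Illustrative examples (hypothetical state variable 'v'):
#   GetGLGetTypeConversion('GLint', 'GLfloat', 'v')
#     evaluates to 'static_cast<GLint>(round(v))'
#   GetGLGetTypeConversion('GLfloat', 'GLint', 'v')
#     evaluates to 'static_cast<GLfloat>(v)'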
class CWriter(object):
"""Context manager that creates a C source file.
To be used with the `with` statement. Returns a normal `file` type, open only
for writing - any existing files with that name will be overwritten. It will
automatically write the contents of `_LICENSE` and `_DO_NOT_EDIT_WARNING`
at the beginning.
Example:
with CWriter("file.cpp") as myfile:
myfile.write("hello")
# type(myfile) == file
"""
def __init__(self, filename, year):
self.filename = filename
self._ENTER_MSG = _LICENSE % year + _DO_NOT_EDIT_WARNING % _lower_prefix
self._EXIT_MSG = ""
try:
os.makedirs(os.path.dirname(filename))
except OSError as e:
      # Only an already-existing directory is benign; re-raise anything else.
      if e.errno != errno.EEXIST:
        raise
self._file = open(filename, 'wb')
def __enter__(self):
self._file.write(self._ENTER_MSG)
return self._file
def __exit__(self, exc_type, exc_value, traceback):
self._file.write(self._EXIT_MSG)
self._file.close()
class CHeaderWriter(CWriter):
"""Context manager that creates a C header file.
Works the same way as CWriter, except it will also add the #ifdef guard
around it. If `file_comment` is set, it will write that before the #ifdef
guard.
"""
def __init__(self, filename, year, file_comment=None):
super(CHeaderWriter, self).__init__(filename, year)
guard = self._get_guard()
if file_comment is None:
file_comment = ""
self._ENTER_MSG = self._ENTER_MSG + file_comment \
+ "#ifndef %s\n#define %s\n\n" % (guard, guard)
self._EXIT_MSG = self._EXIT_MSG + "#endif // %s\n" % guard
def _get_guard(self):
non_alnum_re = re.compile(r'[^a-zA-Z0-9]')
assert self.filename.startswith("gpu/")
return non_alnum_re.sub('_', self.filename).upper() + '_'
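  # Illustrative example (hypothetical path): for a file named
  # "gpu/GLES2/gles2_cmd_helper.h" the generated guard is
  # "GPU_GLES2_GLES2_CMD_HELPER_H_".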
class TypeHandler(object):
"""This class emits code for a particular type of function."""
_remove_expected_call_re = re.compile(r' EXPECT_CALL.*?;\n', re.S)
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
if func.GetInfo('needs_size') and not func.name.endswith('Bucket'):
func.AddCmdArg(DataSizeArgument('data_size'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return func.num_pointer_args >= 1
def WriteStruct(self, func, f):
"""Writes a structure that matches the arguments to a function."""
comment = func.GetInfo('cmd_comment')
    if comment is not None:
f.write(comment)
f.write("struct %s {\n" % func.name)
f.write(" typedef %s ValueType;\n" % func.name)
f.write(" static const CommandId kCmdId = k%s;\n" % func.name)
func.WriteCmdArgFlag(f)
func.WriteCmdFlag(f)
f.write("\n")
result = func.GetInfo('result')
    if result is not None:
if len(result) == 1:
f.write(" typedef %s Result;\n\n" % result[0])
else:
f.write(" struct Result {\n")
for line in result:
f.write(" %s;\n" % line)
f.write(" };\n\n")
func.WriteCmdComputeSize(f)
func.WriteCmdSetHeader(f)
func.WriteCmdInit(f)
func.WriteCmdSet(f)
func.WriteArgAccessors(f)
f.write(" gpu::CommandHeader header;\n")
total_args = 0
args = func.GetCmdArgs()
for arg in args:
for cmd_type, name in arg.GetArgDecls():
f.write(" %s %s;\n" % (cmd_type, name))
total_args += 1
trace_queue = func.GetInfo('trace_queueing_flow', False)
if trace_queue:
f.write(" uint32_t trace_id;\n")
total_args += 1
consts = func.GetCmdConstants()
for const in consts:
const_decls = const.GetArgDecls()
assert(len(const_decls) == 1)
const_cmd_type, const_name = const_decls[0]
f.write(" static const %s %s = %s;\n" %
(const_cmd_type, const_name, const.GetConstantValue()))
f.write("};\n")
f.write("\n")
size = total_args * _SIZE_OF_UINT32 + _SIZE_OF_COMMAND_HEADER
f.write("static_assert(sizeof(%s) == %d,\n" % (func.name, size))
f.write(" \"size of %s should be %d\");\n" %
(func.name, size))
f.write("static_assert(offsetof(%s, header) == 0,\n" % func.name)
f.write(" \"offset of %s header should be 0\");\n" %
func.name)
offset = _SIZE_OF_COMMAND_HEADER
for arg in args:
for _, name in arg.GetArgDecls():
f.write("static_assert(offsetof(%s, %s) == %d,\n" %
(func.name, name, offset))
f.write(" \"offset of %s %s should be %d\");\n" %
(func.name, name, offset))
offset += _SIZE_OF_UINT32
    if result is not None and len(result) > 1:
      offset = 0
for line in result:
parts = line.split()
name = parts[-1]
check = """
static_assert(offsetof(%(cmd_name)s::Result, %(field_name)s) == %(offset)d,
"offset of %(cmd_name)s Result %(field_name)s should be "
"%(offset)d");
"""
f.write((check.strip() + "\n") % {
'cmd_name': func.name,
'field_name': name,
'offset': offset,
})
offset += _SIZE_OF_UINT32
f.write("\n")
def WriteHandlerImplementation(self, func, f):
"""Writes the handler implementation for this command."""
args = []
for arg in func.GetOriginalArgs():
if arg.name.endswith("size") and arg.type == "GLsizei":
args.append("num_%s" % func.GetLastOriginalArg().name)
elif arg.name == "length":
args.append("nullptr")
else:
args.append(arg.name)
if func.GetInfo('type') == 'GETn' and func.name != 'GetSynciv':
args.append('num_values')
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), ", ".join(args)))
def WriteCmdSizeTest(self, _func, f):
"""Writes the size test for a command."""
f.write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def WriteFormatTest(self, func, f):
"""Writes a format test for a command."""
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
f.write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
f.write(");\n")
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
f.write(" cmd.header.command);\n")
func.type_handler.WriteCmdSizeTest(func, f)
for value, arg in enumerate(args):
f.write(" EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
(arg.type, value + 11, arg.GetArgAccessor('cmd')))
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd));\n")
f.write("}\n")
f.write("\n")
def WriteImmediateFormatTest(self, func, f):
"""Writes a format test for an immediate version of a command."""
pass
def WriteGetDataSizeCode(self, func, arg, f):
"""Writes the code to set data_size used in validation"""
pass
def WriteImmediateHandlerImplementation (self, func, f):
"""Writes the handler impl for the immediate version of a command."""
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteBucketHandlerImplementation (self, func, f):
"""Writes the handler impl for the bucket version of a command."""
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteServiceHandlerFunctionHeader(self, func, f):
"""Writes function header for service implementation handlers."""
f.write("""error::Error %(prefix)sDecoderImpl::Handle%(name)s(
uint32_t immediate_data_size, const volatile void* cmd_data) {
""" % {'name': func.name, 'prefix' : _prefix})
if func.IsES3():
f.write("""if (!feature_info_->IsWebGL2OrES3OrHigherContext())
return error::kUnknownCommand;
""")
if func.IsES31():
f.write("""return error::kUnknownCommand;
}
""")
return
if func.GetCmdArgs():
f.write("""const volatile %(prefix)s::cmds::%(name)s& c =
*static_cast<const volatile %(prefix)s::cmds::%(name)s*>(cmd_data);
""" % {'name': func.name, 'prefix': _lower_prefix})
def WriteServiceHandlerArgGetCode(self, func, f):
"""Writes the argument unpack code for service handlers."""
if len(func.GetOriginalArgs()) > 0:
for arg in func.GetOriginalArgs():
if not arg.IsPointer():
arg.WriteGetCode(f)
    # Write pointer arguments second. Sizes may be dependent on other args.
for arg in func.GetOriginalArgs():
if arg.IsPointer():
self.WriteGetDataSizeCode(func, arg, f)
arg.WriteGetCode(f)
def WriteImmediateServiceHandlerArgGetCode(self, func, f):
"""Writes the argument unpack code for immediate service handlers."""
for arg in func.GetOriginalArgs():
if arg.IsPointer():
self.WriteGetDataSizeCode(func, arg, f)
arg.WriteGetCode(f)
def WriteBucketServiceHandlerArgGetCode(self, func, f):
"""Writes the argument unpack code for bucket service handlers."""
for arg in func.GetCmdArgs():
arg.WriteGetCode(f)
for arg in func.GetOriginalArgs():
if arg.IsConstant():
arg.WriteGetCode(f)
self.WriteGetDataSizeCode(func, arg, f)
def WriteServiceImplementation(self, func, f):
"""Writes the service implementation for a command."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
self.WriteServiceHandlerArgGetCode(func, f)
func.WriteHandlerValidation(f)
func.WriteQueueTraceEvent(f)
func.WriteHandlerImplementation(f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WriteImmediateServiceImplementation(self, func, f):
"""Writes the service implementation for an immediate version of command."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
self.WriteImmediateServiceHandlerArgGetCode(func, f)
func.WriteHandlerValidation(f)
func.WriteQueueTraceEvent(f)
func.WriteHandlerImplementation(f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WriteBucketServiceImplementation(self, func, f):
"""Writes the service implementation for a bucket version of command."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
self.WriteBucketServiceHandlerArgGetCode(func, f)
func.WriteHandlerValidation(f)
func.WriteQueueTraceEvent(f)
func.WriteHandlerImplementation(f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WritePassthroughServiceFunctionHeader(self, func, f):
"""Writes function header for service passthrough handlers."""
f.write("""error::Error GLES2DecoderPassthroughImpl::Handle%(name)s(
uint32_t immediate_data_size, const volatile void* cmd_data) {
""" % {'name': func.name})
if func.IsES3():
f.write("""if (!feature_info_->IsWebGL2OrES3OrHigherContext())
return error::kUnknownCommand;
""")
if func.IsES31():
f.write("""if (!feature_info_->IsWebGL2ComputeContext())
return error::kUnknownCommand;
""")
if func.GetCmdArgs():
f.write("""const volatile gles2::cmds::%(name)s& c =
*static_cast<const volatile gles2::cmds::%(name)s*>(cmd_data);
""" % {'name': func.name})
def WritePassthroughServiceFunctionDoerCall(self, func, f):
"""Writes the function call to the passthrough service doer."""
f.write(""" error::Error error = Do%(name)s(%(args)s);
if (error != error::kNoError) {
return error;
}""" % {'name': func.original_name,
'args': func.MakePassthroughServiceDoerArgString("")})
def WritePassthroughServiceImplementation(self, func, f):
"""Writes the service implementation for a command."""
self.WritePassthroughServiceFunctionHeader(func, f)
self.WriteHandlerExtensionCheck(func, f)
self.WriteServiceHandlerArgGetCode(func, f)
func.WritePassthroughHandlerValidation(f)
self.WritePassthroughServiceFunctionDoerCall(func, f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WritePassthroughImmediateServiceImplementation(self, func, f):
"""Writes the service implementation for a command."""
self.WritePassthroughServiceFunctionHeader(func, f)
self.WriteHandlerExtensionCheck(func, f)
self.WriteImmediateServiceHandlerArgGetCode(func, f)
func.WritePassthroughHandlerValidation(f)
self.WritePassthroughServiceFunctionDoerCall(func, f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WritePassthroughBucketServiceImplementation(self, func, f):
"""Writes the service implementation for a command."""
self.WritePassthroughServiceFunctionHeader(func, f)
self.WriteHandlerExtensionCheck(func, f)
self.WriteBucketServiceHandlerArgGetCode(func, f)
func.WritePassthroughHandlerValidation(f)
self.WritePassthroughServiceFunctionDoerCall(func, f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WriteHandlerExtensionCheck(self, func, f):
if func.GetInfo('extension_flag'):
f.write(" if (!features().%s) {\n" % func.GetInfo('extension_flag'))
f.write(" return error::kUnknownCommand;")
f.write(" }\n\n")
def WriteHandlerDeferReadWrite(self, func, f):
"""Writes the code to handle deferring reads or writes."""
defer_draws = func.GetInfo('defer_draws')
defer_reads = func.GetInfo('defer_reads')
if defer_draws or defer_reads:
f.write(" error::Error error;\n")
if defer_draws:
f.write(" error = WillAccessBoundFramebufferForDraw();\n")
f.write(" if (error != error::kNoError)\n")
f.write(" return error;\n")
if defer_reads:
f.write(" error = WillAccessBoundFramebufferForRead();\n")
f.write(" if (error != error::kNoError)\n")
f.write(" return error;\n")
def WriteValidUnitTest(self, func, f, test, *extras):
"""Writes a valid unit test for the service implementation."""
if not func.GetInfo('expectation', True):
test = self._remove_expected_call_re.sub('', test)
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
gl_arg_strings = [
arg.GetValidGLArg(func) \
for arg in func.GetOriginalArgs()
]
gl_func_name = func.GetGLTestFunctionName()
varz = {
'name': name,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
}
for extra in extras:
varz.update(extra)
old_test = ""
    while old_test != test:
old_test = test
test = test % varz
f.write(test % varz)
def WriteInvalidUnitTest(self, func, f, test, *extras):
"""Writes an invalid unit test for the service implementation."""
if func.IsES3():
return
for invalid_arg_index, invalid_arg in enumerate(func.GetOriginalArgs()):
# Service implementation does not test constants, as they are not part of
# the call in the service side.
if invalid_arg.IsConstant():
continue
num_invalid_values = invalid_arg.GetNumInvalidValues(func)
for value_index in range(0, num_invalid_values):
arg_strings = []
parse_result = "kNoError"
gl_error = None
for arg in func.GetOriginalArgs():
if arg.IsConstant():
continue
if invalid_arg is arg:
(arg_string, parse_result, gl_error) = arg.GetInvalidArg(
value_index)
else:
arg_string = arg.GetValidArg(func)
arg_strings.append(arg_string)
gl_arg_strings = []
for arg in func.GetOriginalArgs():
gl_arg_strings.append("_")
gl_func_name = func.GetGLTestFunctionName()
gl_error_test = ''
        if gl_error is not None:
gl_error_test = '\n EXPECT_EQ(%s, GetGLError());' % gl_error
varz = {
'name': func.name,
'arg_index': invalid_arg_index,
'value_index': value_index,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'all_but_last_args': ", ".join(arg_strings[:-1]),
'gl_args': ", ".join(gl_arg_strings),
'parse_result': parse_result,
'gl_error_test': gl_error_test,
}
for extra in extras:
varz.update(extra)
f.write(test % varz)
def WriteServiceUnitTest(self, func, f, *extras):
"""Writes the service unit test for a command."""
if func.name == 'Enable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, true);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
elif func.name == 'Disable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, false);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
self.WriteValidUnitTest(func, f, valid_test, *extras)
if not func.IsES3():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Writes the service unit test for an immediate command."""
pass
def WriteImmediateValidationCode(self, func, f):
"""Writes the validation code for an immediate version of a command."""
pass
def WriteBucketServiceUnitTest(self, func, f, *extras):
"""Writes the service unit test for a bucket command."""
pass
def WriteGLES2ImplementationDeclaration(self, func, f):
"""Writes the GLES2 Implemention declaration."""
f.write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("", add_default = True)))
f.write("\n")
def WriteGLES2CLibImplementation(self, func, f):
f.write("%s GL_APIENTRY GLES2%s(%s) {\n" %
(func.return_type, func.name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
f.write(" %sgles2::GetGLContext()->%s(%s);\n" %
(result_string, func.original_name,
func.MakeOriginalArgString("")))
f.write("}\n")
def WriteGLES2Header(self, func, f):
"""Writes a re-write macro for GLES"""
f.write("#define gl%s GLES2_GET_FUN(%s)\n" %(func.name, func.name))
def WriteClientGLCallLog(self, func, f):
"""Writes a logging macro for the client side code."""
comma = ""
if len(func.GetOriginalArgs()):
comma = " << "
f.write(
' GPU_CLIENT_LOG("[" << GetLogPrefix() << "] %s("%s%s << ")");\n' %
(func.prefixed_name, comma, func.MakeLogArgString()))
def WriteClientGLReturnLog(self, func, f):
"""Writes the return value logging code."""
if func.return_type != "void":
f.write(' GPU_CLIENT_LOG("return:" << result)\n')
def WriteGLES2ImplementationHeader(self, func, f):
"""Writes the GLES2 Implemention."""
self.WriteGLES2ImplementationDeclaration(func, f)
def WriteGLES2TraceImplementationHeader(self, func, f):
"""Writes the GLES2 Trace Implemention header."""
f.write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2TraceImplementation(self, func, f):
"""Writes the GLES2 Trace Implemention."""
f.write("%s GLES2TraceImplementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
f.write(' TRACE_EVENT_BINARY_EFFICIENT0("gpu", "GLES2Trace::%s");\n' %
func.name)
f.write(" %sgl_->%s(%s);\n" %
(result_string, func.name, func.MakeOriginalArgString("")))
f.write("}\n")
f.write("\n")
def WriteGLES2Implementation(self, func, f):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func', True)
if func.can_auto_generate and impl_func:
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteClientGLCallLog(func, f)
func.WriteDestinationInitalizationValidation(f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(" helper_->%s(%s);\n" %
(func.name, func.MakeHelperArgString("")))
if _prefix != 'WebGPU':
f.write(" CheckGLError();\n")
self.WriteClientGLReturnLog(func, f)
f.write("}\n")
f.write("\n")
def WriteGLES2InterfaceHeader(self, func, f):
"""Writes the GLES2 Interface."""
f.write("virtual %s %s(%s) = 0;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("", add_default = True)))
def WriteGLES2InterfaceStub(self, func, f):
"""Writes the GLES2 Interface stub declaration."""
f.write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2InterfaceStubImpl(self, func, f):
"""Writes the GLES2 Interface stub declaration."""
args = func.GetOriginalArgs()
arg_string = ", ".join(
["%s /* %s */" % (arg.type, arg.name) for arg in args])
f.write("%s %sInterfaceStub::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name, arg_string))
if func.return_type != "void":
f.write(" return 0;\n")
f.write("}\n")
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test', True)
if func.can_auto_generate and client_test:
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [arg for arg in func.GetOriginalArgs() if arg.IsConstant()]
if constants:
code = """
TEST_F(%(prefix)sImplementationTest,
%(name)sInvalidConstantArg%(invalid_index)d) {
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
else:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
})
def WriteDestinationInitalizationValidation(self, func, f):
"""Writes the client side destintion initialization validation."""
for arg in func.GetOriginalArgs():
arg.WriteDestinationInitalizationValidation(f, func)
def WriteTraceEvent(self, func, f):
f.write(' TRACE_EVENT0("gpu", "%sImplementation::%s");\n' %
(_prefix, func.original_name))
def WriteImmediateCmdComputeSize(self, _func, f):
"""Writes the size computation code for the immediate version of a cmd."""
f.write(" static uint32_t ComputeSize(uint32_t size_in_bytes) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(ValueType) + // NOLINT\n")
f.write(" RoundSizeToMultipleOfEntries(size_in_bytes));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Writes the SetHeader function for the immediate version of a cmd."""
f.write(" void SetHeader(uint32_t size_in_bytes) {\n")
f.write(" header.SetCmdByTotalSize<ValueType>(size_in_bytes);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Writes the Init function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteImmediateCmdSet(self, func, f):
"""Writes the Set function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteCmdHelper(self, func, f):
"""Writes the cmd helper definition for a cmd."""
code = """ void %(name)s(%(typed_args)s) {
%(lp)s::cmds::%(name)s* c = GetCmdSpace<%(lp)s::cmds::%(name)s>();
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
def WriteImmediateCmdHelper(self, func, f):
"""Writes the cmd helper definition for the immediate version of a cmd."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t s = 0;
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(s);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
class StateSetHandler(TypeHandler):
"""Handler for commands that simply set state."""
def WriteHandlerImplementation(self, func, f):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
args = func.GetOriginalArgs()
for ndx,item in enumerate(states):
code = []
if 'range_checks' in item:
for range_check in item['range_checks']:
code.append("%s %s" % (args[ndx].name, range_check['check']))
if 'nan_check' in item:
# Drivers might generate an INVALID_VALUE error when a value is set
# to NaN. This is allowed behavior under GLES 3.0 section 2.1.1 or
# OpenGL 4.5 section 2.3.4.1 - providing NaN allows undefined results.
# Make this behavior consistent within Chromium, and avoid leaking GL
# errors by generating the error in the command buffer instead of
# letting the GL driver generate it.
code.append("std::isnan(%s)" % args[ndx].name)
if len(code):
f.write(" if (%s) {\n" % " ||\n ".join(code))
f.write(
' LOCAL_SET_GL_ERROR(GL_INVALID_VALUE,'
' "%s", "%s out of range");\n' %
(func.name, args[ndx].name))
f.write(" return error::kNoError;\n")
f.write(" }\n")
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
f.write(" if (%s) {\n" % " ||\n ".join(code))
for ndx,item in enumerate(states):
f.write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'on_change' in state:
f.write(" %s\n" % state['on_change'])
if not func.GetInfo("no_gl"):
for ndx,item in enumerate(states):
if item.get('cached', False):
f.write(" state_.%s = %s;\n" %
(CachedStateName(item), args[ndx].name))
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
f.write(" }\n")
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
TypeHandler.WriteServiceUnitTest(self, func, f, *extras)
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
for ndx,item in enumerate(states):
if 'range_checks' in item:
for check_ndx, range_check in enumerate(item['range_checks']):
valid_test = """
TEST_P(%(test_name)s, %(name)sInvalidValue%(ndx)d_%(check_ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = range_check['test_value']
varz = {
'name': name,
'ndx': ndx,
'check_ndx': check_ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
varz.update(extra)
f.write(valid_test % varz)
if 'nan_check' in item:
valid_test = """
TEST_P(%(test_name)s, %(name)sNaNValue%(ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = 'nanf("")'
varz = {
'name': name,
'ndx': ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
varz.update(extra)
f.write(valid_test % varz)
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class StateSetRGBAlphaHandler(TypeHandler):
"""Handler for commands that simply set state that have rgb/alpha."""
def WriteHandlerImplementation(self, func, f):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx % num_args].name))
f.write(" if (%s) {\n" % " ||\n ".join(code))
for ndx, item in enumerate(states):
f.write(" state_.%s = %s;\n" %
(item['name'], args[ndx % num_args].name))
if 'on_change' in state:
f.write(" %s\n" % state['on_change'])
if not func.GetInfo("no_gl"):
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
f.write(" }\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class StateSetFrontBackSeparateHandler(TypeHandler):
"""Handler for commands that simply set state that have front/back."""
def WriteHandlerImplementation(self, func, f):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
args = func.GetOriginalArgs()
face = args[0].name
num_args = len(args)
f.write(" bool changed = false;\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
f.write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
code = []
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx + 1].name))
f.write(" changed |= %s;\n" % " ||\n ".join(code))
f.write(" }\n")
f.write(" if (changed) {\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
f.write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
for ndx, item in enumerate(group):
f.write(" state_.%s = %s;\n" %
(item['name'], args[ndx + 1].name))
f.write(" }\n")
if 'on_change' in state:
f.write(" %s\n" % state['on_change'])
if not func.GetInfo("no_gl"):
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
f.write(" }\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class StateSetFrontBackHandler(TypeHandler):
"""Handler for commands that simply set state that set both front/back."""
def WriteHandlerImplementation(self, func, f):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for group in Grouper(num_args, states):
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
f.write(" if (%s) {\n" % " ||\n ".join(code))
for group in Grouper(num_args, states):
for ndx, item in enumerate(group):
f.write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'on_change' in state:
f.write(" %s\n" % state['on_change'])
if not func.GetInfo("no_gl"):
f.write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
f.write(" }\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class StateSetNamedParameter(TypeHandler):
"""Handler for commands that set a state chosen with an enum parameter."""
def WriteHandlerImplementation(self, func, f):
"""Overridden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATE_INFO[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
assert num_args == 2
f.write(" switch (%s) {\n" % args[0].name)
for state in states:
f.write(" case %s:\n" % state['enum'])
f.write(" if (state_.%s != %s) {\n" %
(state['name'], args[1].name))
f.write(" state_.%s = %s;\n" % (state['name'], args[1].name))
if not func.GetInfo("no_gl"):
operation = " %s(%s);\n" % \
(func.GetGLFunctionName(), func.MakeOriginalArgString(""))
f.write(GuardState(state, operation, "feature_info_"))
f.write(" }\n")
f.write(" break;\n")
f.write(" default:\n")
f.write(" NOTREACHED();\n")
f.write(" }\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class CustomHandler(TypeHandler):
"""Handler for commands that are auto-generated but require minor tweaks."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if (func.name.startswith('CompressedTex') and func.name.endswith('Bucket')):
# Remove imageSize argument, take the size from the bucket instead.
func.cmd_args = [arg for arg in func.cmd_args if arg.name != 'imageSize']
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
else:
TypeHandler.InitFunction(self, func)
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
if func.IsES31():
TypeHandler.WriteServiceImplementation(self, func, f)
def WriteImmediateServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
if func.IsES31():
TypeHandler.WriteImmediateServiceImplementation(self, func, f)
def WriteBucketServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
if func.IsES31():
TypeHandler.WriteBucketServiceImplementation(self, func, f)
def WritePassthroughServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WritePassthroughImmediateServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WritePassthroughBucketServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdGetTotalSize(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(
" uint32_t total_size = 0; // WARNING: compute correct size.\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, f)
f.write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
arg.WriteSetCode(f, 4, '_%s' % arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, f)
f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
f.write(" }\n")
f.write("\n")
class NoCommandHandler(CustomHandler):
"""Handler for functions that don't use commands"""
def WriteGLES2Implementation(self, func, f):
pass
def WriteGLES2ImplementationUnitTest(self, func, f):
pass
class DataHandler(TypeHandler):
"""
Handler for glBufferData, glBufferSubData, glTex{Sub}Image*D.
"""
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
# TODO: Move this data to _FUNCTION_INFO?
name = func.name
if name.endswith("Immediate"):
name = name[0:-9]
if arg.name in func.size_args:
size = func.size_args[arg.name]
f.write(" uint32_t %s = %s;\n" % (arg.GetReservedSizeId(), size))
else:
f.write("// uint32_t %s = 0; // WARNING: compute correct size.\n" % (
arg.GetReservedSizeId()))
def WriteImmediateCmdGetTotalSize(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, f)
f.write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
f.write(" %s = _%s;\n" % (arg.name, arg.name))
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, f)
f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
# TODO: Remove this exception.
return
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
pass
  # NOTE: The two definitions below shadow the WriteImmediateCmdInit/Set
  # definitions earlier in this class; Python keeps the later definition in a
  # class body, so the earlier bodies above are dead code.
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler."""
    pass
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler."""
    pass
class BindHandler(TypeHandler):
"""Handler for glBind___ type functions."""
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
if len(func.GetOriginalArgs()) == 1:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(kNewServiceId));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(kNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != nullptr);
}
"""
self.WriteValidUnitTest(func, f, valid_test, {
'resource_type': func.GetOriginalArgs()[0].resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_,
%(gl_func_name)s(%(gl_args_with_new_id)s));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args_with_new_id)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != nullptr);
}
"""
gl_args_with_new_id = []
args_with_new_id = []
for arg in func.GetOriginalArgs():
if hasattr(arg, 'resource_type'):
gl_args_with_new_id.append('kNewServiceId')
args_with_new_id.append('kNewClientId')
else:
gl_args_with_new_id.append(arg.GetValidGLArg(func))
args_with_new_id.append(arg.GetValidArg(func))
self.WriteValidUnitTest(func, f, valid_test, {
'args_with_new_id': ", ".join(args_with_new_id),
'gl_args_with_new_id': ", ".join(gl_args_with_new_id),
'resource_type': func.GetResourceIdArg().resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, *extras)
def WriteGLES2Implementation(self, func, f):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func', True)
if func.can_auto_generate and impl_func:
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
code = """ if (Is%(type)sReservedId(%(id)s)) {
SetGLError(GL_INVALID_OPERATION, "%(name)s\", \"%(id)s reserved id");
return;
}
%(name)sHelper(%(arg_string)s);
CheckGLError();
}
"""
name_arg = func.GetResourceIdArg()
f.write(code % {
'name': func.name,
'arg_string': func.MakeOriginalArgString(""),
'id': name_arg.name,
'type': name_arg.resource_type,
'lc_type': name_arg.resource_type.lower(),
})
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test', True)
if not client_test:
return
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));"""
if not func.IsES3():
code += """
ClearCommands();
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());"""
code += """
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class GENnHandler(TypeHandler):
"""Handler for glGen___ type functions."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
pass
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
code = """ uint32_t %(data_size)s;
if (!base::CheckMul(n, sizeof(GLuint)).AssignIfValid(&%(data_size)s)) {
return error::kOutOfBounds;
}
""" % {'data_size': arg.GetReservedSizeId()}
f.write(code)
  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler."""
    raise NotImplementedError("GENn functions are immediate")
  def WriteImmediateHandlerImplementation(self, func, f):
    """Overridden from TypeHandler."""
param_name = func.GetLastOriginalArg().name
f.write(" auto %(name)s_copy = std::make_unique<GLuint[]>(n);\n"
" GLuint* %(name)s_safe = %(name)s_copy.get();\n"
" std::copy(%(name)s, %(name)s + n, %(name)s_safe);\n"
" if (!%(ns)sCheckUniqueAndNonNullIds(n, %(name)s_safe) ||\n"
" !%(func)sHelper(n, %(name)s_safe)) {\n"
" return error::kInvalidArguments;\n"
" }\n" % {'name': param_name,
'func': func.original_name,
'ns': _Namespace()})
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
log_code = (""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});""" % func.GetOriginalArgs()[1].name)
args = {
'log_code': log_code,
'return_type': func.return_type,
'prefix' : _prefix,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_types': func.GetInfo('resource_types'),
'count_name': func.GetOriginalArgs()[0].name,
}
f.write(
"%(return_type)s %(prefix)sImplementation::"
"%(name)s(%(typed_args)s) {\n" %
args)
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
not_shared = func.GetInfo('not_shared')
if not_shared:
alloc_code = ("""\
IdAllocator* id_allocator = GetIdAllocator(IdNamespaces::k%s);
for (GLsizei ii = 0; ii < n; ++ii)
%s[ii] = id_allocator->AllocateID();""" %
(func.GetInfo('resource_types'), func.GetOriginalArgs()[1].name))
else:
alloc_code = ("""\
GetIdHandler(SharedIdNamespaces::k%(resource_types)s)->
MakeIds(this, 0, %(args)s);""" % args)
args['alloc_code'] = alloc_code
code = """\
GPU_CLIENT_SINGLE_THREAD_CHECK();
%(alloc_code)s
%(name)sHelper(%(args)s);
helper_->%(name)sImmediate(%(args)s);
"""
if not not_shared:
code += """\
if (share_group_->bind_generates_resource())
helper_->CommandBufferHelper::Flush();
"""
code += """\
%(log_code)s
CheckGLError();
}
"""
f.write(code % args)
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Overrriden from TypeHandler."""
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
GLuint ids[2] = { 0, };
struct Cmds {
cmds::%(name)sImmediate gen;
GLuint data[2];
};
Cmds expected;
expected.gen.Init(base::size(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(base::size(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(k%(types)sStartId, ids[0]);
EXPECT_EQ(k%(types)sStartId + 1, ids[1]);
}
"""
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
raise NotImplementedError("GENn functions are immediate")
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgPointee<1>(kNewServiceId));
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
GLuint temp = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd->Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != nullptr);
}
"""
self.WriteValidUnitTest(func, f, valid_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
duplicate_id_test = """
TEST_P(%(test_name)s, %(name)sDuplicateOrNullIds) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
GLuint temp[3] = {kNewClientId, kNewClientId + 1, kNewClientId};
SpecializedSetup<cmds::%(name)s, 1>(true);
cmd->Init(3, temp);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) == nullptr);
EXPECT_TRUE(Get%(resource_name)s(kNewClientId + 1) == nullptr);
GLuint null_id[2] = {kNewClientId, 0};
cmd->Init(2, null_id);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) == nullptr);
}
"""
self.WriteValidUnitTest(func, f, duplicate_id_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
cmd->Init(1, &client_%(resource_name)s_id_);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
}
"""
self.WriteValidUnitTest(func, f, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
}, *extras)
def WriteImmediateCmdComputeSize(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize(GLsizei _n) {\n")
f.write(
" return static_cast<uint32_t>(sizeof(GLuint) * _n); // NOLINT\n")
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(ValueType) + ComputeDataSize(_n)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader(GLsizei _n) {\n")
f.write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
f.write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
f.write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
f.write(" %s = _%s;\n" % (arg.name, arg.name))
f.write(" memcpy(ImmediateDataAddress(this),\n")
f.write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
f.write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
f.write(" const uint32_t size = ComputeSize(_n);\n")
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(n);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" static GLuint ids[] = { 12, 23, 34, };\n")
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd, static_cast<GLsizei>(base::size(ids)), ids);\n")
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
f.write(" cmd.header.command);\n")
f.write(" EXPECT_EQ(sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
f.write(" cmd.header.size * 4u);\n")
f.write(" EXPECT_EQ(static_cast<GLsizei>(base::size(ids)), cmd.n);\n");
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(base::size(ids) * 4u));\n")
f.write(" EXPECT_EQ(0, memcmp(ids, ImmediateDataAddress(&cmd),\n")
f.write(" sizeof(ids)));\n")
f.write("}\n")
f.write("\n")
class CreateHandler(TypeHandler):
"""Handler for glCreate___ type functions."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("client_id", 'uint32_t'))
def __GetResourceType(self, func):
if func.return_type == "GLsync":
return "Sync"
else:
return func.name[6:] # Create*
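# e.g. "CreateProgram"[6:] == "Program" (len("Create") == 6). The GLsync
# branch presumably covers sync-object creation, whose function name does
# not follow the Create* pattern.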
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
%(id_type_cast)sEXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(Return(%(const_service_id)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsES3():
valid_test += """
%(return_type)s service_id = 0;
EXPECT_TRUE(Get%(resource_type)sServiceId(kNewClientId, &service_id));
EXPECT_EQ(%(const_service_id)s, service_id);
}
"""
else:
valid_test += """
EXPECT_TRUE(Get%(resource_type)s(kNewClientId));
}
"""
comma = ""
cmd_arg_count = 0
for arg in func.GetOriginalArgs():
if not arg.IsConstant():
cmd_arg_count += 1
if cmd_arg_count:
comma = ", "
if func.return_type == 'GLsync':
id_type_cast = ("const GLsync kNewServiceIdGLuint = reinterpret_cast"
"<GLsync>(kNewServiceId);\n ")
const_service_id = "kNewServiceIdGLuint"
else:
id_type_cast = ""
const_service_id = "kNewServiceId"
self.WriteValidUnitTest(func, f, valid_test, {
'comma': comma,
'resource_type': self.__GetResourceType(func),
'return_type': func.return_type,
'id_type_cast': id_type_cast,
'const_service_id': const_service_id,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, {
'comma': comma,
}, *extras)
def WriteHandlerImplementation(self, func, f):
"""Overridden from TypeHandler."""
if func.IsES3():
code = """ uint32_t client_id = c.client_id;
%(return_type)s service_id = 0;
if (group_->Get%(resource_name)sServiceId(client_id, &service_id)) {
return error::kInvalidArguments;
}
service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
group_->Add%(resource_name)sId(client_id, service_id);
}
"""
else:
code = """ uint32_t client_id = c.client_id;
if (Get%(resource_name)s(client_id)) {
return error::kInvalidArguments;
}
%(return_type)s service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
Create%(resource_name)s(client_id, service_id%(gl_args_with_comma)s);
}
"""
f.write(code % {
'resource_name': self.__GetResourceType(func),
'return_type': func.return_type,
'gl_func_name': func.GetGLFunctionName(),
'gl_args': func.MakeOriginalArgString(""),
'gl_args_with_comma': func.MakeOriginalArgString("", True) })
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(" GLuint client_id;\n")
not_shared = func.GetInfo('not_shared')
if not_shared:
f.write('IdAllocator* id_allocator = GetIdAllocator(IdNamespaces::k%s);' %
func.GetInfo('resource_types'))
f.write('client_id = id_allocator->AllocateID();')
else:
if func.return_type == "GLsync":
f.write(
" GetIdHandler(SharedIdNamespaces::kSyncs)->\n")
else:
f.write(
" GetIdHandler(SharedIdNamespaces::kProgramsAndShaders)->\n")
f.write(" MakeIds(this, 0, 1, &client_id);\n")
f.write(" helper_->%s(%s);\n" %
(func.name, func.MakeCmdArgString("")))
f.write(' GPU_CLIENT_LOG("returned " << client_id);\n')
f.write(" CheckGLError();\n")
if func.return_type == "GLsync":
f.write(" return reinterpret_cast<GLsync>(client_id);\n")
else:
f.write(" return client_id;\n")
f.write("}\n")
f.write("\n")
def WritePassthroughServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class DeleteHandler(TypeHandler):
"""Handler for glDelete___ single resource type functions."""
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
if func.IsES3() or func.IsES31():
TypeHandler.WriteServiceImplementation(self, func, f)
# HandleDeleteShader and HandleDeleteProgram are manually written.
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(
" if (%s == 0)\n return;" % func.GetOriginalArgs()[-1].name);
f.write(" %sHelper(%s);\n" %
(func.original_name, func.GetOriginalArgs()[-1].name))
f.write(" CheckGLError();\n")
f.write("}\n")
f.write("\n")
def WriteHandlerImplementation(self, func, f):
"""Overridden from TypeHandler."""
assert len(func.GetOriginalArgs()) == 1
arg = func.GetOriginalArgs()[0]
f.write(" %sHelper(%s);\n" % (func.original_name, arg.name))
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class DELnHandler(TypeHandler):
"""Handler for glDelete___ type functions."""
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
code = """ uint32_t %(data_size)s;
if (!base::CheckMul(n, sizeof(GLuint)).AssignIfValid(&%(data_size)s)) {
return error::kOutOfBounds;
}
""" % {'data_size': arg.GetReservedSizeId()}
f.write(code)
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Overrriden from TypeHandler."""
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
GLuint ids[2] = { k%(types)sStartId, k%(types)sStartId + 1 };
struct Cmds {
cmds::%(name)sImmediate del;
GLuint data[2];
};
Cmds expected;
expected.del.Init(base::size(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(base::size(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == nullptr);
}
"""
self.WriteValidUnitTest(func, f, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
GetSharedMemoryAs<GLuint*>()[0] = kInvalidClientId;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, f, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd.Init(1, &client_%(resource_name)s_id_);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == nullptr);
}
"""
self.WriteValidUnitTest(func, f, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
GLuint temp = kInvalidClientId;
cmd.Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
self.WriteValidUnitTest(func, f, invalid_test, *extras)
def WriteHandlerImplementation(self, func, f):
"""Overridden from TypeHandler."""
f.write(" %sHelper(n, %s);\n" %
(func.name, func.GetLastOriginalArg().name))
def WriteImmediateHandlerImplementation(self, func, f):
"""Overridden from TypeHandler."""
f.write(" %sHelper(n, %s);\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func', True)
if impl_func:
args = {
'return_type': func.return_type,
'prefix' : _prefix,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_type': func.GetInfo('resource_type').lower(),
'count_name': func.GetOriginalArgs()[0].name,
}
f.write(
"%(return_type)s %(prefix)sImplementation::"
"%(name)s(%(typed_args)s) {\n" %
args)
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
f.write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});
""" % func.GetOriginalArgs()[1].name)
f.write(""" GPU_CLIENT_DCHECK_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
DCHECK(%s[i] != 0);
}
});
""" % func.GetOriginalArgs()[1].name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
code = """ %(name)sHelper(%(args)s);
CheckGLError();
}
"""
f.write(code % args)
def WriteImmediateCmdComputeSize(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize(GLsizei _n) {\n")
f.write(
" return static_cast<uint32_t>(sizeof(GLuint) * _n); // NOLINT\n")
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(ValueType) + ComputeDataSize(_n)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader(GLsizei _n) {\n")
f.write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
f.write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
f.write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
f.write(" %s = _%s;\n" % (arg.name, arg.name))
f.write(" memcpy(ImmediateDataAddress(this),\n")
f.write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
f.write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
f.write(" const uint32_t size = ComputeSize(_n);\n")
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(n);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" static GLuint ids[] = { 12, 23, 34, };\n")
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd, static_cast<GLsizei>(base::size(ids)), ids);\n")
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
f.write(" cmd.header.command);\n")
f.write(" EXPECT_EQ(sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
f.write(" cmd.header.size * 4u);\n")
f.write(" EXPECT_EQ(static_cast<GLsizei>(base::size(ids)), cmd.n);\n");
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(base::size(ids) * 4u));\n")
f.write(" EXPECT_EQ(0, memcmp(ids, ImmediateDataAddress(&cmd),\n")
f.write(" sizeof(ids)));\n")
f.write("}\n")
f.write("\n")
class GETnHandler(TypeHandler):
"""Handler for GETn for glGetBooleanv, glGetFloatv, ... type functions."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
TypeHandler.InitFunction(self, func)
if func.name == 'GetSynciv':
return
arg_insert_point = len(func.passthrough_service_doer_args) - 1
func.passthrough_service_doer_args.insert(
arg_insert_point, Argument('length', 'GLsizei*'))
func.passthrough_service_doer_args.insert(
arg_insert_point, Argument('bufsize', 'GLsizei'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
last_arg = func.GetLastOriginalArg()
# All except shm_id and shm_offset.
all_but_last_args = func.GetCmdArgs()[:-2]
for arg in all_but_last_args:
arg.WriteGetCode(f)
code = """ typedef cmds::%(func_name)s::Result Result;
GLsizei num_values = 0;
if (!GetNumValuesReturnedForGLGet(pname, &num_values)) {
LOCAL_SET_GL_ERROR_INVALID_ENUM(":%(func_name)s", pname, "pname");
return error::kNoError;
}
uint32_t checked_size = 0;
if (!Result::ComputeSize(num_values).AssignIfValid(&checked_size)) {
return error::kOutOfBounds;
}
Result* result = GetSharedMemoryAs<Result*>(
c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
checked_size);
%(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : nullptr;
"""
f.write(code % {
'last_arg_type': last_arg.type,
'last_arg_name': last_arg.name,
'func_name': func.name,
})
func.WriteHandlerValidation(f)
code = """ // Check that the client initialized the result.
if (result->size != 0) {
return error::kInvalidArguments;
}
"""
shadowed = func.GetInfo('shadowed')
if not shadowed:
f.write(' LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name)
f.write(code)
func.WriteHandlerImplementation(f)
if shadowed:
code = """ result->SetNumResults(num_values);
return error::kNoError;
}
"""
else:
code = """ GLenum error = LOCAL_PEEK_GL_ERROR("%(func_name)s");
if (error == GL_NO_ERROR) {
result->SetNumResults(num_values);
}
return error::kNoError;
}
"""
f.write(code % {'func_name': func.name})
def WritePassthroughServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
self.WritePassthroughServiceFunctionHeader(func, f)
last_arg = func.GetLastOriginalArg()
# All except shm_id and shm_offset.
all_but_last_args = func.GetCmdArgs()[:-2]
for arg in all_but_last_args:
arg.WriteGetCode(f)
code = """ unsigned int buffer_size = 0;
typedef cmds::%(func_name)s::Result Result;
Result* result = GetSharedMemoryAndSizeAs<Result*>(
c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
sizeof(Result), &buffer_size);
%(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : nullptr;
if (%(last_arg_name)s == nullptr) {
return error::kOutOfBounds;
}
GLsizei bufsize = Result::ComputeMaxResults(buffer_size);
GLsizei written_values = 0;
GLsizei* length = &written_values;
"""
f.write(code % {
'last_arg_type': last_arg.type,
'last_arg_name': last_arg.name,
'func_name': func.name,
})
self.WritePassthroughServiceFunctionDoerCall(func, f)
code = """ if (written_values > bufsize) {
return error::kOutOfBounds;
}
result->SetNumResults(written_values);
return error::kNoError;
}
"""
f.write(code % {'func_name': func.name})
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func', True)
if impl_func:
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
all_but_last_args = func.GetOriginalArgs()[:-1]
args = []
has_length_arg = False
for arg in all_but_last_args:
if arg.type == 'GLsync':
args.append('ToGLuint(%s)' % arg.name)
elif arg.name.endswith('size') and arg.type == 'GLsizei':
continue
elif arg.name == 'length':
has_length_arg = True
continue
else:
args.append(arg.name)
arg_string = ", ".join(args)
all_arg_string = (
", ".join([
"%s" % arg.name
for arg in func.GetOriginalArgs() if not arg.IsConstant()]))
self.WriteTraceEvent(func, f)
code = """ if (%(func_name)sHelper(%(all_arg_string)s)) {
return;
}
typedef cmds::%(func_name)s::Result Result;
ScopedResultPtr<Result> result = GetResultAs<Result>();
if (!result) {
return;
}
result->SetNumResults(0);
helper_->%(func_name)s(%(arg_string)s,
GetResultShmId(), result.offset());
WaitForCmd();
result->CopyResult(%(last_arg_name)s);
GPU_CLIENT_LOG_CODE_BLOCK({
for (int32_t i = 0; i < result->GetNumResults(); ++i) {
GPU_CLIENT_LOG(" " << i << ": " << result->GetData()[i]);
}
});"""
if has_length_arg:
code += """
if (length) {
*length = result->GetNumResults();
}"""
code += """
CheckGLError();
}
"""
f.write(code % {
'func_name': func.name,
'arg_string': arg_string,
'all_arg_string': all_arg_string,
'last_arg_name': func.GetLastOriginalArg().name,
})
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
typedef cmds::%(name)s::Result::Type ResultType;
ResultType result = 0;
Cmds expected;
ExpectedMemoryInfo result1 = GetExpectedResultMemory(
sizeof(uint32_t) + sizeof(ResultType));
expected.cmd.Init(%(cmd_args)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, SizedResultHelper<ResultType>(1)))
.RetiresOnSaturation();
gl_->%(name)s(%(args)s, &result);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(static_cast<ResultType>(1), result);
}
"""
first_cmd_arg = func.GetCmdArgs()[0].GetValidNonCachedClientSideCmdArg(func)
if not first_cmd_arg:
return
first_gl_arg = func.GetOriginalArgs()[0].GetValidNonCachedClientSideArg(
func)
cmd_arg_strings = [first_cmd_arg]
for arg in func.GetCmdArgs()[1:-2]:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = [first_gl_arg]
for arg in func.GetOriginalArgs()[1:-1]:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, GetError())
.WillRepeatedly(Return(GL_NO_ERROR));
SpecializedSetup<cmds::%(name)s, 0>(true);
typedef cmds::%(name)s::Result Result;
Result* result = static_cast<Result*>(shared_memory_address_);
EXPECT_CALL(*gl_, %(gl_func_name)s(%(local_gl_args)s));
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(decoder_->GetGLES2Util()->GLGetNumValuesReturned(
%(valid_pname)s),
result->GetNumResults());
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
cmd_arg_strings = []
valid_pname = ''
for arg in func.GetOriginalArgs()[:-1]:
if arg.name == 'length':
gl_arg_value = 'nullptr'
elif arg.name.endswith('size'):
gl_arg_value = ("decoder_->GetGLES2Util()->GLGetNumValuesReturned(%s)" %
valid_pname)
elif arg.type == 'GLsync':
gl_arg_value = 'reinterpret_cast<GLsync>(kServiceSyncId)'
else:
gl_arg_value = arg.GetValidGLArg(func)
gl_arg_strings.append(gl_arg_value)
if arg.name == 'pname':
valid_pname = gl_arg_value
if arg.name.endswith('size') or arg.name == 'length':
continue
if arg.type == 'GLsync':
arg_value = 'client_sync_id_'
else:
arg_value = arg.GetValidArg(func)
cmd_arg_strings.append(arg_value)
if func.GetInfo('gl_test_func') == 'glGetIntegerv':
gl_arg_strings.append("_")
else:
gl_arg_strings.append("result->GetData()")
cmd_arg_strings.append("shared_memory_id_")
cmd_arg_strings.append("shared_memory_offset_")
self.WriteValidUnitTest(func, f, valid_test, {
'local_gl_args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'valid_pname': valid_pname,
}, *extras)
if not func.IsES3():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s::Result* result =
static_cast<cmds::%(name)s::Result*>(shared_memory_address_);
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));
EXPECT_EQ(0u, result->size);%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, *extras)
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class ArrayArgTypeHandler(TypeHandler):
"""Base class for type handlers that handle args that are arrays"""
def GetArrayType(self, func):
"""Returns the type of the element in the element array being PUT to."""
for arg in func.GetOriginalArgs():
if arg.IsPointer():
element_type = arg.GetPointedType()
return element_type
# Special case: array type handler is used for a function that is forwarded
# to the actual array type implementation
element_type = func.GetOriginalArgs()[-1].type
assert all(arg.type == element_type
for arg in func.GetOriginalArgs()[-self.GetArrayCount(func):])
return element_type
def GetArrayCount(self, func):
"""Returns the count of the elements in the array being PUT to."""
return func.GetInfo('count')
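# Example (assuming the function table sets 'count' to 4 for this entry):
# for glUniform4fv the PUT data is an array of GLfloat, so GetArrayType()
# returns "GLfloat" and GetArrayCount() returns 4.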
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class PUTHandler(ArrayArgTypeHandler):
"""Handler for glTexParameter_v, glVertexAttrib_v functions."""
def WriteServiceUnitTest(self, func, f, *extras):
"""Writes the service unit test for a command."""
expected_call = "EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));"
if func.GetInfo("first_element_only"):
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()
]
gl_arg_strings[-1] = "*" + gl_arg_strings[-1]
expected_call = ("EXPECT_CALL(*gl_, %%(gl_func_name)s(%s));" %
", ".join(gl_arg_strings))
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
%(expected_call)s
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
extra = {
'data_type': self.GetArrayType(func),
'data_value': func.GetInfo('data_value') or '0',
'expected_call': expected_call,
}
self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Writes the service unit test for a command."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(gl_client_args)s, &temp[0]);
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s, %(expectation)s));
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_client_arg_strings = [
arg.GetValidArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
gl_any_strings = ["_"] * len(gl_arg_strings)
data_count = self.GetArrayCount(func)
if func.GetInfo('first_element_only'):
expectation = "temp[0]"
else:
expectation = "PointsToArray(temp, %s)" % data_count
extra = {
'expectation': expectation,
'data_type': self.GetArrayType(func),
'data_count': data_count,
'data_value': func.GetInfo('data_value') or '0',
'gl_client_args': ", ".join(gl_client_arg_strings),
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();"""
if func.IsES3():
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(1);
"""
else:
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
"""
invalid_test += """
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));
%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
code = (""" uint32_t %(data_size)s;
if (!%(namespace)sGLES2Util::""" +
"""ComputeDataSize<%(arrayType)s, %(arrayCount)d>(1, &%(data_size)s)) {
return error::kOutOfBounds;
}
""")
f.write(code % {'data_size': arg.GetReservedSizeId(),
'namespace': _Namespace(),
'arrayType': self.GetArrayType(func),
'arrayCount': self.GetArrayCount(func)})
if func.IsImmediate():
f.write(" if (%s > immediate_data_size) {\n" % arg.GetReservedSizeId())
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def __NeedsToCalcDataCount(self, func):
use_count_func = func.GetInfo('use_count_func')
return use_count_func not in (None, False)
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if impl_func not in (None, True):
return
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
if self.__NeedsToCalcDataCount(func):
f.write(" uint32_t count = %sGLES2Util::Calc%sDataCount(%s);\n" %
(_Namespace(), func.name, func.GetOriginalArgs()[0].name))
f.write(" DCHECK_LE(count, %du);\n" % self.GetArrayCount(func))
f.write(" if (count == 0) {\n")
f.write(" SetGLErrorInvalidEnum(\"%s\", %s, \"%s\");\n" %
(func.prefixed_name, func.GetOriginalArgs()[0].name,
func.GetOriginalArgs()[0].name))
f.write(" return;\n")
f.write(" }\n")
else:
f.write(" uint32_t count = %d;" % self.GetArrayCount(func))
f.write(" for (uint32_t ii = 0; ii < count; ++ii)\n")
f.write(' GPU_CLIENT_LOG("value[" << ii << "]: " << %s[ii]);\n' %
func.GetLastOriginalArg().name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
f.write(" CheckGLError();\n")
f.write("}\n")
f.write("\n")
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test', True)
if not client_test:
return
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
%(type)s data[%(count)d] = {0};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count)d];
};
for (int jj = 0; jj < %(count)d; ++jj) {
data[jj] = static_cast<%(type)s>(jj);
}
Cmds expected;
expected.cmd.Init(%(cmd_args)s, &data[0]);
gl_->%(name)s(%(args)s, &data[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()[0:-2]
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteImmediateCmdComputeSize(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize() {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(%s) * %d);\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
f.write(" }\n")
f.write("\n")
if self.__NeedsToCalcDataCount(func):
f.write(" static uint32_t ComputeEffectiveDataSize(%s %s) {\n" %
(func.GetOriginalArgs()[0].type,
func.GetOriginalArgs()[0].name))
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(%s) * %sGLES2Util::Calc%sDataCount(%s));\n" %
(self.GetArrayType(func), _Namespace(), func.original_name,
func.GetOriginalArgs()[0].name))
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize() {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(
" sizeof(ValueType) + ComputeDataSize());\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader() {\n")
f.write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize());\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
f.write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
f.write(" SetHeader();\n")
args = func.GetCmdArgs()
for arg in args:
arg.WriteSetCode(f, 4, "_%s" % arg.name)
f.write(" memcpy(ImmediateDataAddress(this),\n")
if self.__NeedsToCalcDataCount(func):
f.write(" _%s, ComputeEffectiveDataSize(%s));" %
(last_arg.name, func.GetOriginalArgs()[0].name))
f.write("""
DCHECK_GE(ComputeDataSize(), ComputeEffectiveDataSize(%(arg)s));
char* pointer = reinterpret_cast<char*>(ImmediateDataAddress(this)) +
ComputeEffectiveDataSize(%(arg)s);
memset(pointer, 0, ComputeDataSize() - ComputeEffectiveDataSize(%(arg)s));
""" % { 'arg': func.GetOriginalArgs()[0].name, })
else:
f.write(" _%s, ComputeDataSize());\n" % last_arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
f.write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
f.write(" const uint32_t size = ComputeSize();\n")
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize();
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" const int kSomeBaseValueToTestWith = 51;\n")
f.write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func)):
f.write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
f.write(" };\n")
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
f.write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
f.write(",\n data);\n")
args = func.GetCmdArgs()
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n"
% func.name)
f.write(" cmd.header.command);\n")
f.write(" EXPECT_EQ(sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(sizeof(data)),\n")
f.write(" cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
f.write(" EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
(arg.type, value + 11, arg.GetArgAccessor('cmd')))
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
# TODO: Check that data was inserted
f.write("}\n")
f.write("\n")
class PUTnHandler(ArrayArgTypeHandler):
"""Handler for PUTn 'glUniform__v' type functions."""
def WriteServiceUnitTest(self, func, f, *extras):
"""Overridden from TypeHandler."""
ArrayArgTypeHandler.WriteServiceUnitTest(self, func, f, *extras)
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgsCountTooLarge) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
# hardcoded to match unit tests.
if count == 0:
# the location of the second element of the 2nd uniform.
# defined in GLES2DecoderBase::SetupShaderForUniform
gl_arg_strings.append("3")
arg_strings.append("ProgramManager::MakeFakeLocation(1, 1)")
elif count == 1:
# the number of elements that gl will be called with.
gl_arg_strings.append("3")
# the number of elements requested in the command.
arg_strings.append("5")
else:
gl_arg_strings.append(arg.GetValidGLArg(func))
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'gl_args': ", ".join(gl_arg_strings),
'args': ", ".join(arg_strings),
}
self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, f, *extras):
"""Overridden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s,
PointsToArray(temp, %(data_count)s)));
cmd.Init(%(args)s, &temp[0]);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
gl_any_strings = []
arg_strings = []
for arg in func.GetOriginalArgs()[0:-1]:
gl_arg_strings.append(arg.GetValidGLArg(func))
gl_any_strings.append("_")
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'data_type': self.GetArrayType(func),
'data_count': self.GetArrayCount(func),
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
code = (""" uint32_t %(data_size)s = 0;
if (count >= 0 && !%(namespace)sGLES2Util::""" +
"""ComputeDataSize<%(arrayType)s, %(arrayCount)d>(count, &%(data_size)s)) {
return error::kOutOfBounds;
}
""")
f.write(code % {'data_size': arg.GetReservedSizeId(),
'namespace': _Namespace(),
'arrayType': self.GetArrayType(func),
'arrayCount': self.GetArrayCount(func)})
if func.IsImmediate():
f.write(" if (%s > immediate_data_size) {\n" % arg.GetReservedSizeId())
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if impl_func not in (None, True):
return
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
last_pointer_name = func.GetLastOriginalPointerArg().name
f.write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < count; ++i) {
""")
values_str = ' << ", " << '.join(
["%s[%d + i * %d]" % (
last_pointer_name, ndx, self.GetArrayCount(func)) for ndx in range(
0, self.GetArrayCount(func))])
f.write(' GPU_CLIENT_LOG(" " << i << ": " << %s);\n' % values_str)
f.write(" }\n });\n")
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeInitString("")))
f.write(" CheckGLError();\n")
f.write("}\n")
f.write("\n")
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test', True)
if not client_test:
return
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count_param)d][%(count)d];
};
Cmds expected;
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for arg in func.GetCmdArgs():
if arg.name.endswith("_shm_id"):
cmd_arg_strings.append("&data[0][0]")
elif arg.name.endswith("_shm_offset"):
continue
else:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = []
count_param = 0
for arg in func.GetOriginalArgs():
if arg.IsPointer():
valid_value = "&data[0][0]"
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'count_param': count_param,
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [
arg for arg in func.GetOriginalArgs()[0:-1] if arg.IsConstant()
]
if not constants:
return
code = """
TEST_F(%(prefix)sImplementationTest,
%(name)sInvalidConstantArg%(invalid_index)d) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
elif arg.IsPointer():
gl_arg_strings.append("&data[0][0]")
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
'count_param': count_param,
})
def WriteImmediateCmdComputeSize(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(%s) * %d * _n); // NOLINT\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(
" sizeof(ValueType) + ComputeDataSize(_n)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader(GLsizei _n) {\n")
f.write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void Init(%s) {\n" %
func.MakeTypedInitString("_"))
f.write(" SetHeader(_count);\n")
args = func.GetCmdArgs()
for arg in args:
arg.WriteSetCode(f, 4, "_%s" % arg.name)
f.write(" memcpy(ImmediateDataAddress(this),\n")
pointer_arg = func.GetLastOriginalPointerArg()
f.write(" _%s, ComputeDataSize(_count));\n" % pointer_arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedInitString("_", True))
f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" %
func.MakeInitString("_"))
f.write(" const uint32_t size = ComputeSize(_count);\n")
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(count);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedInitString(""),
"args": func.MakeInitString("")
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
args = func.GetOriginalArgs()
count_param = 0
for arg in args:
if arg.name == "count":
count_param = int(arg.GetValidClientSideCmdArg(func))
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" const int kSomeBaseValueToTestWith = 51;\n")
f.write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func) * count_param):
f.write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
f.write(" };\n")
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" const GLsizei kNumElements = %d;\n" % count_param)
f.write(" const size_t kExpectedCmdSize =\n")
f.write(" sizeof(cmd) + kNumElements * sizeof(%s) * %d;\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd")
for value, arg in enumerate(args):
if arg.IsPointer():
f.write(",\n data")
elif arg.IsConstant():
continue
else:
f.write(",\n static_cast<%s>(%d)" % (arg.type, value + 1))
f.write(");\n")
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
f.write(" cmd.header.command);\n")
f.write(" EXPECT_EQ(kExpectedCmdSize, cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
if arg.IsPointer() or arg.IsConstant():
continue
f.write(" EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
(arg.type, value + 1, arg.GetArgAccessor('cmd')))
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
# TODO: Check that data was inserted
f.write("}\n")
f.write("\n")
class PUTSTRHandler(ArrayArgTypeHandler):
"""Handler for functions that pass a string array."""
def __GetDataArg(self, func):
"""Return the argument that points to the 2D char arrays"""
for arg in func.GetOriginalArgs():
if arg.IsPointer2D():
return arg
return None
def __GetLengthArg(self, func):
"""Return the argument that holds length for each char array"""
for arg in func.GetOriginalArgs():
if arg.IsPointer() and not arg.IsPointer2D():
return arg
return None
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
data_arg = self.__GetDataArg(func)
length_arg = self.__GetLengthArg(func)
log_code_block = """ GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei ii = 0; ii < count; ++ii) {
if (%(data)s[ii]) {"""
if length_arg is None:
log_code_block += """
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");"""
else:
log_code_block += """
if (%(length)s && %(length)s[ii] >= 0) {
const std::string my_str(%(data)s[ii], %(length)s[ii]);
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << my_str << "\\n---");
} else {
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");
}"""
log_code_block += """
} else {
GPU_CLIENT_LOG(" " << ii << ": NULL");
}
}
});
"""
f.write(log_code_block % {
'data': data_arg.name,
'length': length_arg.name if length_arg is not None else ''
})
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
bucket_args = []
for arg in func.GetOriginalArgs():
if arg.name == 'count' or arg == self.__GetLengthArg(func):
continue
if arg == self.__GetDataArg(func):
bucket_args.append('kResultBucketId')
else:
bucket_args.append(arg.name)
code_block = """
if (!PackStringsToBucket(count, %(data)s, %(length)s, "gl%(func_name)s")) {
return;
}
helper_->%(func_name)sBucket(%(bucket_args)s);
helper_->SetBucketSize(kResultBucketId, 0);
CheckGLError();
}
"""
f.write(code_block % {
'data': data_arg.name,
'length': length_arg.name if length_arg is not None else 'nullptr',
'func_name': func.name,
'bucket_args': ', '.join(bucket_args),
})
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Overrriden from TypeHandler."""
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
const uint32_t kBucketId = %(prefix)sImplementation::kResultBucketId;
const char* kString1 = "happy";
const char* kString2 = "ending";
const size_t kString1Size = ::strlen(kString1) + 1;
const size_t kString2Size = ::strlen(kString2) + 1;
const size_t kHeaderSize = sizeof(GLint) * 3;
const size_t kSourceSize = kHeaderSize + kString1Size + kString2Size;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedString1Size =
transfer_buffer_->RoundToAlignment(kString1Size);
const size_t kPaddedString2Size =
transfer_buffer_->RoundToAlignment(kString2Size);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data1;
cmd::SetToken set_token2;
cmd::SetBucketData set_bucket_data2;
cmd::SetToken set_token3;
cmds::%(name)sBucket cmd_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedString1Size);
ExpectedMemoryInfo mem2 = GetExpectedMemory(kPaddedString2Size);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data1.Init(
kBucketId, kHeaderSize, kString1Size, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.set_bucket_data2.Init(
kBucketId, kHeaderSize + kString1Size, kString2Size, mem2.id,
mem2.offset);
expected.set_token3.Init(GetNextToken());
expected.cmd_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString1, kString2 };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
bucket_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
bucket_args.append('kBucketId')
elif arg == self.__GetLengthArg(func):
gl_args.append('nullptr')
elif arg.name == 'count':
gl_args.append('2')
else:
gl_args.append(arg.GetValidClientSideArg(func))
bucket_args.append(arg.GetValidClientSideArg(func))
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
if self.__GetLengthArg(func) is None:
return
code = """
TEST_F(%(prefix)sImplementationTest, %(name)sWithLength) {
const uint32_t kBucketId = %(prefix)sImplementation::kResultBucketId;
const char* kString = "foobar******";
const size_t kStringSize = 6; // We only need "foobar".
const size_t kHeaderSize = sizeof(GLint) * 2;
const size_t kSourceSize = kHeaderSize + kStringSize + 1;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedStringSize =
transfer_buffer_->RoundToAlignment(kStringSize + 1);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data;
cmd::SetToken set_token2;
cmds::ShaderSourceBucket shader_source_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedStringSize);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data.Init(
kBucketId, kHeaderSize, kStringSize + 1, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.shader_source_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString };
const GLint kLength[] = { kStringSize };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
elif arg == self.__GetLengthArg(func):
gl_args.append('kLength')
elif arg.name == 'count':
gl_args.append('1')
else:
gl_args.append(arg.GetValidClientSideArg(func))
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
def WriteBucketServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
cmd_args = []
cmd_args_with_invalid_id = []
gl_args = []
for index, arg in enumerate(func.GetOriginalArgs()):
if arg == self.__GetLengthArg(func):
gl_args.append('_')
elif arg.name == 'count':
gl_args.append('1')
elif arg == self.__GetDataArg(func):
cmd_args.append('kBucketId')
cmd_args_with_invalid_id.append('kBucketId')
gl_args.append('_')
elif index == 0: # Resource ID arg
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append('kInvalidClientId')
gl_args.append(arg.GetValidGLArg(func))
else:
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append(arg.GetValidArg(func))
gl_args.append(arg.GetValidGLArg(func))
test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));"""
test += """
}
"""
self.WriteValidUnitTest(func, f, test, {
'cmd_args': ", ".join(cmd_args),
'gl_args': ", ".join(gl_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
cmds::%(name)s cmd;
// Test no bucket.
cmd.Init(%(cmd_args)s);
EXPECT_NE(error::kNoError, ExecuteCmd(cmd));
// Test invalid client.
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmd.Init(%(cmd_args_with_invalid_id)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, f, test, {
'cmd_args': ", ".join(cmd_args),
'cmd_args_with_invalid_id': ", ".join(cmd_args_with_invalid_id),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidHeader) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
const GLsizei kCount = static_cast<GLsizei>(base::size(kSource));
const GLsizei kTests[] = {
kCount + 1,
0,
std::numeric_limits<GLsizei>::max(),
-1,
};
for (size_t ii = 0; ii < base::size(kTests); ++ii) {
SetBucketAsCStrings(kBucketId, 1, kSource, kTests[ii], kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
}
"""
self.WriteValidUnitTest(func, f, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidStringEnding) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kInvalidStrEnd = '*';
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kInvalidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, f, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
class PUTXnHandler(ArrayArgTypeHandler):
"""Handler for glUniform?f functions."""
def WriteHandlerImplementation(self, func, f):
"""Overrriden from TypeHandler."""
code = """ %(type)s temp[%(count)s] = { %(values)s};
Do%(name)sv(%(location)s, 1, &temp[0]);
"""
values = ""
args = func.GetOriginalArgs()
count = int(self.GetArrayCount(func))
for ii in range(count):
values += "%s, " % args[len(args) - count + ii].name
f.write(code % {
'name': func.name,
'count': self.GetArrayCount(func),
'type': self.GetArrayType(func),
'location': args[0].name,
'args': func.MakeOriginalArgString(""),
'values': values,
})
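  # A minimal sketch of the expansion, assuming glUniform2f(location, x, y)
  # is routed through this handler (names are illustrative):
  #
  #   GLfloat temp[2] = { x, y, };
  #   DoUniform2fv(location, 1, &temp[0]);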
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(name)sv(%(local_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
local_args = "%s, 1, _" % args[0].GetValidGLArg(func)
self.WriteValidUnitTest(func, f, valid_test, {
'name': func.name,
'count': self.GetArrayCount(func),
'local_args': local_args,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(name)sv(_, _, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, {
'name': func.GetInfo('name'),
'count': self.GetArrayCount(func),
})
class GLcharHandler(CustomHandler):
"""Handler for functions that pass a single string ."""
def WriteImmediateCmdComputeSize(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeSize(uint32_t data_size) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(ValueType) + data_size); // NOLINT\n")
f.write(" }\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
code = """
void SetHeader(uint32_t data_size) {
header.SetCmdBySize<ValueType>(data_size);
}
"""
f.write(code)
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
args = func.GetCmdArgs()
code = """
void Init(%s, uint32_t _data_size) {
SetHeader(_data_size);
"""
f.write(code % func.MakeTypedArgString("_"))
for arg in args:
arg.WriteSetCode(f, 4, "_%s" % arg.name)
code = """
memcpy(ImmediateDataAddress(this), _%s, _data_size);
}
"""
f.write(code % last_arg.name)
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void* Set(void* cmd%s, uint32_t _data_size) {\n" %
func.MakeTypedCmdArgString("_", True))
f.write(" static_cast<ValueType*>(cmd)->Init(%s, _data_size);\n" %
func.MakeCmdArgString("_"))
f.write(" return NextImmediateCmdAddress<ValueType>("
"cmd, _data_size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t data_size = strlen(name);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpace<%(lp)s::cmds::%(name)s>(data_size);
if (c) {
c->Init(%(args)s, data_size);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
init_code = []
check_code = []
all_but_last_arg = func.GetCmdArgs()[:-1]
for value, arg in enumerate(all_but_last_arg):
init_code.append(" static_cast<%s>(%d)," % (arg.type, value + 11))
for value, arg in enumerate(all_but_last_arg):
check_code.append(" EXPECT_EQ(static_cast<%s>(%d), %s);" %
(arg.type, value + 11, arg.GetArgAccessor('cmd')))
code = """
TEST_F(%(prefix)sFormatTest, %(func_name)s) {
cmds::%(func_name)s& cmd = *GetBufferAs<cmds::%(func_name)s>();
static const char* const test_str = \"test string\";
void* next_cmd = cmd.Set(
&cmd,
%(init_code)s
test_str,
strlen(test_str));
EXPECT_EQ(static_cast<uint32_t>(cmds::%(func_name)s::kCmdId),
cmd.header.command);
EXPECT_EQ(sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)),
cmd.header.size * 4u);
EXPECT_EQ(static_cast<char*>(next_cmd),
reinterpret_cast<char*>(&cmd) + sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)));
%(check_code)s
EXPECT_EQ(static_cast<uint32_t>(strlen(test_str)), cmd.data_size);
EXPECT_EQ(0, memcmp(test_str, ImmediateDataAddress(&cmd), strlen(test_str)));
CheckBytesWritten(
next_cmd,
sizeof(cmd) + RoundSizeToMultipleOfEntries(strlen(test_str)),
sizeof(cmd) + strlen(test_str));
}
"""
f.write(code % {
'prefix': _prefix,
'func_name': func.name,
'init_code': "\n".join(init_code),
'check_code': "\n".join(check_code),
})
class GLcharNHandler(CustomHandler):
"""Handler for functions that pass a single string with an optional len."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.cmd_args = []
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
f.write("""
GLuint bucket_id = static_cast<GLuint>(c.%(bucket_id)s);
Bucket* bucket = GetBucket(bucket_id);
if (!bucket || bucket->size() == 0) {
return error::kInvalidArguments;
}
std::string str;
if (!bucket->GetAsString(&str)) {
return error::kInvalidArguments;
}
%(gl_func_name)s(0, str.c_str());
return error::kNoError;
}
""" % {
'gl_func_name': func.GetGLFunctionName(),
'bucket_id': func.cmd_args[0].name,
})
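  # Sketch of the generated handler body for a GLcharN function such as
  # glInsertEventMarkerEXT (illustrative; the actual callee comes from
  # GetGLFunctionName()): after the bucket is unpacked into 'str', the
  # handler ends with a call like DoInsertEventMarkerEXT(0, str.c_str()).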
class IsHandler(TypeHandler):
"""Handler for glIs____ type and glGetError functions."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("result_shm_id", 'uint32_t'))
func.AddCmdArg(Argument("result_shm_offset", 'uint32_t'))
    if func.GetInfo('result') is None:
func.AddInfo('result', ['uint32_t'])
func.passthrough_service_doer_args.append(Argument('result', 'uint32_t*'))
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
comma = ""
    if func.GetOriginalArgs():
      comma = ", "
self.WriteValidUnitTest(func, f, valid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, f, invalid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgsBadSharedMemoryId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skInvalidSharedMemoryId, shared_memory_offset_);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
cmd.Init(%(args)s%(comma)sshared_memory_id_, kInvalidSharedMemoryOffset);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, f, invalid_test, {
'comma': comma,
}, *extras)
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, f)
if func.IsES31():
return
self.WriteHandlerExtensionCheck(func, f)
args = func.GetOriginalArgs()
for arg in args:
arg.WriteGetCode(f)
code = """ typedef cmds::%(func_name)s::Result Result;
Result* result_dst = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result_dst));
if (!result_dst) {
return error::kOutOfBounds;
}
"""
f.write(code % {'func_name': func.name})
func.WriteHandlerValidation(f)
f.write(" *result_dst = %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WritePassthroughServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
self.WritePassthroughServiceFunctionHeader(func, f)
self.WriteHandlerExtensionCheck(func, f)
self.WriteServiceHandlerArgGetCode(func, f)
code = """ typedef cmds::%(func_name)s::Result Result;
Result* result = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result));
if (!result) {
return error::kOutOfBounds;
}
"""
f.write(code % {'func_name': func.name})
self.WritePassthroughServiceFunctionDoerCall(func, f)
f.write(" return error::kNoError;\n")
f.write("}\n")
f.write("\n")
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func', True)
if impl_func:
error_value = func.GetInfo("error_value") or "GL_FALSE"
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteTraceEvent(func, f)
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
f.write(" typedef cmds::%s::Result Result;\n" % func.name)
f.write(" ScopedResultPtr<Result> result = GetResultAs<Result>();\n")
f.write(" if (!result) {\n")
f.write(" return %s;\n" % error_value)
f.write(" }\n")
f.write(" *result = 0;\n")
assert len(func.GetOriginalArgs()) == 1
id_arg = func.GetOriginalArgs()[0]
if id_arg.type == 'GLsync':
arg_string = "ToGLuint(%s)" % func.MakeOriginalArgString("")
else:
arg_string = func.MakeOriginalArgString("")
f.write(
" helper_->%s(%s, GetResultShmId(), result.offset());\n" %
(func.name, arg_string))
f.write(" WaitForCmd();\n")
f.write(" %s result_value = *result" % func.return_type)
if func.return_type == "GLboolean":
f.write(" != 0")
f.write(';\n GPU_CLIENT_LOG("returned " << result_value);\n')
f.write(" CheckGLError();\n")
f.write(" return result_value;\n")
f.write("}\n")
f.write("\n")
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test', True)
if client_test:
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
ExpectedMemoryInfo result1 =
GetExpectedResultMemory(sizeof(cmds::%(name)s::Result));
expected.cmd.Init(%(cmd_id_value)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, uint32_t(GL_TRUE)))
.RetiresOnSaturation();
GLboolean result = gl_->%(name)s(%(gl_id_value)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_TRUE(result);
}
"""
args = func.GetOriginalArgs()
assert len(args) == 1
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'cmd_id_value': args[0].GetValidClientSideCmdArg(func),
'gl_id_value': args[0].GetValidClientSideArg(func) })
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class STRnHandler(TypeHandler):
"""Handler for GetProgramInfoLog, GetShaderInfoLog, GetShaderSource, and
GetTranslatedShaderSourceANGLE."""
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
# remove all but the first cmd args.
cmd_args = func.GetCmdArgs()
func.ClearCmdArgs()
func.AddCmdArg(cmd_args[0])
# add on a bucket id.
func.AddCmdArg(Argument('bucket_id', 'uint32_t'))
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
code_1 = """%(return_type)s %(prefix)sImplementation::%(func_name)s(
%(args)s) {
GPU_CLIENT_SINGLE_THREAD_CHECK();
"""
code_2 = """ GPU_CLIENT_LOG("[" << GetLogPrefix()
<< "] gl%(func_name)s" << "("
<< %(arg0)s << ", "
<< %(arg1)s << ", "
<< static_cast<void*>(%(arg2)s) << ", "
<< static_cast<void*>(%(arg3)s) << ")");
helper_->SetBucketSize(kResultBucketId, 0);
helper_->%(func_name)s(%(id_name)s, kResultBucketId);
std::string str;
GLsizei max_size = 0;
if (GetBucketAsString(kResultBucketId, &str)) {
    if (%(bufsize_name)s > 0) {
max_size =
std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size());
memcpy(%(dest_name)s, str.c_str(), max_size);
%(dest_name)s[max_size] = '\\0';
GPU_CLIENT_LOG("------\\n" << %(dest_name)s << "\\n------");
}
}
if (%(length_name)s != nullptr) {
*%(length_name)s = max_size;
}
CheckGLError();
}
"""
args = func.GetOriginalArgs()
str_args = {
'prefix' : _prefix,
'return_type': func.return_type,
'func_name': func.original_name,
'args': func.MakeTypedOriginalArgString(""),
'id_name': args[0].name,
'bufsize_name': args[1].name,
'length_name': args[2].name,
'dest_name': args[3].name,
'arg0': args[0].name,
'arg1': args[1].name,
'arg2': args[2].name,
'arg3': args[3].name,
}
f.write(code_1 % str_args)
func.WriteDestinationInitalizationValidation(f)
f.write(code_2 % str_args)
def WriteServiceUnitTest(self, func, f, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
const char* kInfo = "hello";
const uint32_t kBucketId = 123;
SpecializedSetup<cmds::%(name)s, 0>(true);
%(expect_len_code)s
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(DoAll(SetArgPointee<2>(strlen(kInfo)),
SetArrayArgument<3>(kInfo, kInfo + strlen(kInfo) + 1)));
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
CommonDecoder::Bucket* bucket = decoder_->GetBucket(kBucketId);
ASSERT_TRUE(bucket != nullptr);
EXPECT_EQ(strlen(kInfo) + 1, bucket->size());
EXPECT_EQ(0, memcmp(bucket->GetData(0, bucket->size()), kInfo,
bucket->size()));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
id_name = args[0].GetValidGLArg(func)
get_len_func = func.GetInfo('get_len_func')
get_len_enum = func.GetInfo('get_len_enum')
sub = {
'id_name': id_name,
'get_len_func': get_len_func,
'get_len_enum': get_len_enum,
'gl_args': '%s, strlen(kInfo) + 1, _, _' %
args[0].GetValidGLArg(func),
'args': '%s, kBucketId' % args[0].GetValidArg(func),
'expect_len_code': '',
}
if get_len_func and get_len_func[0:2] == 'gl':
sub['expect_len_code'] = (
" EXPECT_CALL(*gl_, %s(%s, %s, _))\n"
" .WillOnce(SetArgPointee<2>(strlen(kInfo) + 1));") % (
get_len_func[2:], id_name, get_len_enum)
self.WriteValidUnitTest(func, f, valid_test, sub, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _, _, _))
.Times(0);
cmds::%(name)s cmd;
cmd.Init(kInvalidClientId, kBucketId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, f, invalid_test, *extras)
def WriteServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
if func.IsES31():
TypeHandler.WriteServiceImplementation(self, func, f)
def WritePassthroughServiceImplementation(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
pass
class NamedType(object):
"""A class that represents a type of an argument in a client function.
A type of an argument that is to be passed through in the command buffer
command. Currently used only for the arguments that are specificly named in
the 'cmd_buffer_functions.txt' f, mostly enums.
"""
def __init__(self, info):
    assert 'is_complete' not in info or info['is_complete'] is True
self.info = info
self.valid = info['valid']
if 'invalid' in info:
self.invalid = info['invalid']
else:
self.invalid = []
if 'valid_es3' in info:
self.valid_es3 = info['valid_es3']
else:
self.valid_es3 = []
if 'deprecated_es3' in info:
self.deprecated_es3 = info['deprecated_es3']
else:
self.deprecated_es3 = []
self.create_validator = info.get('validator', True)
self.is_complete = info.get('is_complete', False)
def GetType(self):
return self.info['type']
def GetInvalidValues(self):
return self.invalid
def GetValidValues(self):
return self.valid
def GetValidValuesES3(self):
return self.valid_es3
def GetDeprecatedValuesES3(self):
return self.deprecated_es3
def HasES3Values(self):
return self.valid_es3 or self.deprecated_es3
def IsConstant(self):
return self.is_complete and len(self.GetValidValues()) == 1
def IsComplete(self):
return self.is_complete
def CreateValidator(self):
return self.create_validator and not self.IsConstant()
def GetConstantValue(self):
return self.GetValidValues()[0]
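  # A minimal sketch of the info dict this class wraps (the field values
  # here are hypothetical, not taken from cmd_buffer_functions.txt):
  #
  #   NamedType({
  #       'type': 'GLenum',
  #       'valid': ['GL_TEXTURE_2D', 'GL_TEXTURE_CUBE_MAP'],
  #       'invalid': ['GL_TEXTURE_3D'],
  #       'valid_es3': ['GL_TEXTURE_3D'],
  #   })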
class Argument(object):
"""A class that represents a function argument."""
cmd_type_map_ = {
'GLenum': ['uint32_t'],
'GLint': ['int32_t'],
'GLintptr': ['int32_t'],
'GLsizei': ['int32_t'],
'GLsizeiptr': ['int32_t'],
'GLfloat': ['float'],
'GLclampf': ['float'],
'GLuint64': ['uint32_t', 'uint32_t'],
}
need_validation_ = ['GLsizei*', 'GLboolean*', 'GLenum*', 'GLint*']
def __init__(self, name, arg_type, arg_default = None):
self.name = name
self.optional = arg_type.endswith("Optional*")
if self.optional:
arg_type = arg_type[:-len("Optional*")] + "*"
self.type = arg_type
self.default = arg_default
if arg_type in self.cmd_type_map_:
self.cmd_type = self.cmd_type_map_[arg_type]
else:
self.cmd_type = ['uint32_t']
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def IsPointer2D(self):
"""Returns true if argument is a 2D pointer."""
return False
def IsConstant(self):
"""Returns true if the argument has only one valid value."""
return False
def AddCmdArgs(self, args):
"""Adds command arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def AddInitArgs(self, args):
"""Adds init arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
    if valid_arg is not None:
return valid_arg
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetArgDecls(self):
if len(self.cmd_type) == 1:
return [(self.cmd_type[0], self.name)]
else:
return [(cmd_type, self.name + '_%d' % i)
for i, cmd_type
in enumerate(self.cmd_type)]
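  # Illustrative: cmd_type_map_ splits a GLuint64 across two 32-bit command
  # fields, so for a hypothetical argument named 'timeout':
  #
  #   Argument('timeout', 'GLuint64').GetArgDecls()
  #   # -> [('uint32_t', 'timeout_0'), ('uint32_t', 'timeout_1')]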
def GetReservedSizeId(self):
"""Gets a special identifier name for the data size of this argument"""
return "%s_size" % self.name
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
    if valid_arg is not None:
return valid_arg
if self.IsPointer():
return 'nullptr'
index = func.GetOriginalArgs().index(self)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%d)" % (index + 1))
return str(index + 1)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
    if valid_arg is not None:
return valid_arg
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
value = self.GetValidArg(func)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideArg(self, _func):
"""Returns a valid value for this argument in a GL call.
Using the value will produce a command buffer service invocation.
Returns None if there is no such value."""
value = '123'
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideCmdArg(self, _func):
"""Returns a valid value for this argument in a command buffer command.
Calling the GL function with the value returned by
GetValidNonCachedClientSideArg will result in a command buffer command
that contains the value returned by this function. """
return '123'
def GetNumInvalidValues(self, _func):
"""returns the number of invalid values to be tested."""
return 0
def GetInvalidArg(self, _index):
"""returns an invalid value and expected parse result by index."""
return ("---ERROR0---", "---ERROR2---", None)
def GetArgAccessor(self, cmd_struct_name):
"""Returns the name of the accessor for the argument within the struct."""
return '%s.%s' % (cmd_struct_name, self.name)
def GetLogArg(self):
"""Get argument appropriate for LOG macro."""
if self.type == 'GLboolean':
return '%sGLES2Util::GetStringBool(%s)' % (_Namespace(), self.name)
if self.type == 'GLenum':
return '%sGLES2Util::GetStringEnum(%s)' % (_Namespace(), self.name)
return self.name
def WriteGetCode(self, f):
"""Writes the code to get an argument from a command structure."""
if self.type == 'GLsync':
my_type = 'GLuint'
else:
my_type = self.type
f.write(" %s %s = static_cast<%s>(c.%s);\n" %
(my_type, self.name, my_type, self.name))
def WriteSetCode(self, f, indent, var):
f.write("%s%s = %s;\n" % (' ' * indent, self.name, var))
def WriteArgAccessor(self, f):
"""Writes specialized accessor for argument."""
pass
def WriteValidationCode(self, f, func):
"""Writes the validation code for an argument."""
pass
def WritePassthroughValidationCode(self, f, func):
"""Writes the passthrough validation code for an argument."""
pass
def WriteClientSideValidationCode(self, f, func):
"""Writes the validation code for an argument."""
pass
def WriteDestinationInitalizationValidation(self, f, func):
"""Writes the client side destintion initialization validation."""
pass
def WriteDestinationInitalizationValidatationIfNeeded(self, f, _func):
"""Writes the client side destintion initialization validation if needed."""
parts = self.type.split(" ")
if len(parts) > 1:
return
if parts[0] in self.need_validation_:
f.write(
" GPU_CLIENT_VALIDATE_DESTINATION_%sINITALIZATION(%s, %s);\n" %
("OPTIONAL_" if self.optional else "", self.type[:-1], self.name))
def GetImmediateVersion(self):
"""Gets the immediate version of this argument."""
return self
def GetBucketVersion(self):
"""Gets the bucket version of this argument."""
return self
class BoolArgument(Argument):
"""class for C++ bool"""
def __init__(self, name, _type, arg_default):
Argument.__init__(self, name, _type, arg_default)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
return 'true'
def GetArgAccessor(self, struct_name):
"""Returns the name of the accessor for the argument within the struct."""
return 'static_cast<bool>(%s.%s)' % (struct_name, self.name)
class GLBooleanArgument(Argument):
"""class for GLboolean"""
def __init__(self, name, _type, arg_default):
Argument.__init__(self, name, 'GLboolean', arg_default)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
return 'true'
class UniformLocationArgument(Argument):
"""class for uniform locations."""
def __init__(self, name, arg_default):
Argument.__init__(self, name, "GLint", arg_default)
def WriteGetCode(self, f):
"""Writes the code to get an argument from a command structure."""
code = """ %s %s = static_cast<%s>(c.%s);
"""
f.write(code % (self.type, self.name, self.type, self.name))
class DataSizeArgument(Argument):
"""class for data_size which Bucket commands do not need."""
def __init__(self, name):
Argument.__init__(self, name, "uint32_t")
def GetBucketVersion(self):
return None
class SizeArgument(Argument):
"""class for GLsizei and GLsizeiptr."""
def GetNumInvalidValues(self, func):
"""overridden from Argument."""
if func.IsImmediate():
return 0
return 1
def GetInvalidArg(self, _index):
"""overridden from Argument."""
return ("-1", "kNoError", "GL_INVALID_VALUE")
def WriteValidationCode(self, f, func):
"""overridden from Argument."""
code = """ if (%(var_name)s < 0) {
LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return error::kNoError;
}
"""
f.write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
def WriteClientSideValidationCode(self, f, func):
"""overridden from Argument."""
code = """ if (%(var_name)s < 0) {
SetGLError(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return;
}
"""
f.write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
class SizeNotNegativeArgument(SizeArgument):
"""class for GLsizeiNotNegative. It's NEVER allowed to be negative"""
def GetInvalidArg(self, _index):
"""overridden from SizeArgument."""
return ("-1", "kOutOfBounds", "GL_NO_ERROR")
def WriteValidationCode(self, f, func):
"""overridden from SizeArgument."""
pass
class EnumBaseArgument(Argument):
"""Base class for EnumArgument, IntArgument, and BitfieldArgument."""
def __init__(self, name, gl_type, type_name, arg_type, gl_error,
named_type_info, arg_default):
Argument.__init__(self, name, gl_type, arg_default)
self.gl_error = gl_error
self.type_name = type_name
self.named_type = NamedType(named_type_info[type_name])
def IsConstant(self):
return self.named_type.IsConstant()
def GetConstantValue(self):
return self.named_type.GetConstantValue()
def WriteValidationCode(self, f, func):
if self.named_type.IsConstant():
return
f.write(" if (!validators_->%s.IsValid(%s)) {\n" %
(ToUnderscore(self.type_name), self.name))
if self.gl_error == "GL_INVALID_ENUM":
f.write(
" LOCAL_SET_GL_ERROR_INVALID_ENUM(\"gl%s\", %s, \"%s\");\n" %
(func.original_name, self.name, self.name))
else:
f.write(
" LOCAL_SET_GL_ERROR(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
f.write(" return error::kNoError;\n")
f.write(" }\n")
def WriteClientSideValidationCode(self, f, func):
if not self.named_type.IsConstant():
return
f.write(" if (%s != %s) {" % (self.name,
self.GetConstantValue()))
f.write(
" SetGLError(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
if func.return_type == "void":
f.write(" return;\n")
else:
f.write(" return %s;\n" % func.GetErrorReturnString())
f.write(" }\n")
def GetValidArg(self, func):
valid_arg = func.GetValidArg(self)
    if valid_arg is not None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
return valid[0]
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
    if valid_arg is not None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
return valid[0]
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetNumInvalidValues(self, _func):
"""returns the number of invalid values to be tested."""
return len(self.named_type.GetInvalidValues())
def GetInvalidArg(self, index):
"""returns an invalid value by index."""
invalid = self.named_type.GetInvalidValues()
if invalid:
num_invalid = len(invalid)
if index >= num_invalid:
index = num_invalid - 1
return (invalid[index], "kNoError", self.gl_error)
return ("---ERROR1---", "kNoError", self.gl_error)
class EnumArgument(EnumBaseArgument):
"""A class that represents a GLenum argument"""
def __init__(self, name, arg_type, named_type_info, arg_default):
EnumBaseArgument.__init__(self, name, "GLenum", arg_type[len("GLenum"):],
arg_type, "GL_INVALID_ENUM", named_type_info,
arg_default)
def GetLogArg(self):
"""Overridden from Argument."""
return ("GLES2Util::GetString%s(%s)" %
(self.type_name, self.name))
class EnumClassArgument(EnumBaseArgument):
"""A class that represents a C++ enum argument encoded as uint32_t"""
def __init__(self, name, arg_type, named_type_info, arg_default):
type_name = arg_type[len("EnumClass"):]
EnumBaseArgument.__init__(self, name, type_name, type_name, arg_type,
"GL_INVALID_ENUM", named_type_info, arg_default)
def GetArgAccessor(self, struct_name):
"""Returns the name of the accessor for the argument within the struct."""
return 'static_cast<%s>(%s.%s)' % (self.type_name, struct_name, self.name)
def WriteSetCode(self, f, indent, var):
f.write("%s%s = static_cast<uint32_t>(%s);\n" %
(' ' * indent, self.name, var))
def GetLogArg(self):
return 'static_cast<uint32_t>(%s)' % self.name
class IntArgument(EnumBaseArgument):
"""A class for a GLint argument that can only accept specific values.
For example glTexImage2D takes a GLint for its internalformat
argument instead of a GLenum.
"""
def __init__(self, name, arg_type, named_type_info, arg_default):
EnumBaseArgument.__init__(self, name, "GLint", arg_type[len("GLint"):],
arg_type, "GL_INVALID_VALUE", named_type_info,
arg_default)
class BitFieldArgument(EnumBaseArgument):
"""A class for a GLbitfield argument that can only accept specific values.
  For example glFenceSync takes a GLbitfield for its flags argument, but it
  must be 0.
"""
def __init__(self, name, arg_type, named_type_info, arg_default):
EnumBaseArgument.__init__(self, name, "GLbitfield",
arg_type[len("GLbitfield"):], arg_type,
"GL_INVALID_VALUE", named_type_info, arg_default)
class ImmediatePointerArgument(Argument):
"""A class that represents an immediate argument to a function.
An immediate argument is one where the data follows the command.
"""
def IsPointer(self):
return True
def GetPointedType(self):
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, f):
"""Overridden from Argument."""
f.write(" volatile %s %s = %sGetImmediateDataAs<volatile %s>(\n" %
(self.type, self.name, _Namespace(), self.type))
f.write(" c, %s, immediate_data_size);\n" %
self.GetReservedSizeId())
def WriteValidationCode(self, f, func):
"""Overridden from Argument."""
if self.optional:
return
f.write(" if (%s == nullptr) {\n" % self.name)
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def WritePassthroughValidationCode(self, f, func):
"""Overridden from Argument."""
if self.optional:
return
f.write(" if (%s == nullptr) {\n" % self.name)
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, f, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(f, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class PointerArgument(Argument):
"""A class that represents a pointer argument to a function."""
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return self.type.count('*') == 2
def GetPointedType(self):
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def GetValidArg(self, func):
"""Overridden from Argument."""
return "shared_memory_id_, shared_memory_offset_"
def GetValidGLArg(self, func):
"""Overridden from Argument."""
return "reinterpret_cast<%s>(shared_memory_address_)" % self.type
def GetNumInvalidValues(self, _func):
"""Overridden from Argument."""
return 2
def GetInvalidArg(self, index):
"""Overridden from Argument."""
if index == 0:
return ("kInvalidSharedMemoryId, 0", "kOutOfBounds", None)
else:
return ("shared_memory_id_, kInvalidSharedMemoryOffset",
"kOutOfBounds", None)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
def AddCmdArgs(self, args):
"""Overridden from Argument."""
args.append(Argument("%s_shm_id" % self.name, 'uint32_t'))
args.append(Argument("%s_shm_offset" % self.name, 'uint32_t'))
def WriteGetCode(self, f):
"""Overridden from Argument."""
f.write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
f.write(
" c.%s_shm_id, c.%s_shm_offset, %s);\n" %
(self.name, self.name, self.GetReservedSizeId()))
def WriteValidationCode(self, f, func):
"""Overridden from Argument."""
if self.optional:
return
f.write(" if (%s == nullptr) {\n" % self.name)
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return ImmediatePointerArgument(self.name, self.type)
def GetBucketVersion(self):
"""Overridden from Argument."""
if self.type.find('char') >= 0:
if self.IsPointer2D():
return InputStringArrayBucketArgument(self.name, self.type)
return InputStringBucketArgument(self.name, self.type)
return BucketPointerArgument(self.name, self.type)
def WriteDestinationInitalizationValidation(self, f, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(f, func)
class BucketPointerArgument(PointerArgument):
"""A class that represents an bucket argument to a function."""
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, f):
"""Overridden from Argument."""
f.write(
" %s %s = bucket->GetData(0, %s);\n" %
(self.type, self.name, self.GetReservedSizeId()))
def WriteValidationCode(self, f, func):
"""Overridden from Argument."""
pass
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, f, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(f, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class InputStringBucketArgument(Argument):
"""A string input argument where the string is passed in a bucket."""
def __init__(self, name, _type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return False
class InputStringArrayBucketArgument(Argument):
"""A string array input argument where the strings are passed in a bucket."""
def __init__(self, name, _type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
self._original_name = name
def WriteGetCode(self, f):
"""Overridden from Argument."""
code = """
Bucket* bucket = GetBucket(c.%(name)s);
if (!bucket) {
return error::kInvalidArguments;
}
GLsizei count = 0;
std::vector<char*> strs;
std::vector<GLint> len;
if (!bucket->GetAsStrings(&count, &strs, &len)) {
return error::kInvalidArguments;
}
const char** %(original_name)s =
strs.size() > 0 ? const_cast<const char**>(&strs[0]) : nullptr;
const GLint* length =
len.size() > 0 ? const_cast<const GLint*>(&len[0]) : nullptr;
(void)length;
"""
f.write(code % {
'name': self.name,
'original_name': self._original_name,
})
def GetValidArg(self, func):
return "kNameBucketId"
def GetValidGLArg(self, func):
return "_"
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return True
class ResourceIdArgument(Argument):
"""A class that represents a resource id argument to a function."""
def __init__(self, name, arg_type, arg_default):
match = re.match("(GLid\w+)", arg_type)
self.resource_type = match.group(1)[4:]
if self.resource_type == "Sync":
arg_type = arg_type.replace(match.group(1), "GLsync")
else:
arg_type = arg_type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, arg_type, arg_default)
def WriteGetCode(self, f):
"""Overridden from Argument."""
if self.type == "GLsync":
my_type = "GLuint"
else:
my_type = self.type
f.write(" %s %s = %s;\n" % (my_type, self.name, self.GetArgAccessor('c')))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
if self.resource_type == "Sync":
return "reinterpret_cast<GLsync>(kService%sId)" % self.resource_type
return "kService%sId" % self.resource_type
class ResourceIdBindArgument(Argument):
"""Represents a resource id argument to a bind function."""
def __init__(self, name, arg_type, arg_default):
match = re.match("(GLidBind\w+)", arg_type)
self.resource_type = match.group(1)[8:]
arg_type = arg_type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, arg_type, arg_default)
def WriteGetCode(self, f):
"""Overridden from Argument."""
code = """ %(type)s %(name)s = c.%(name)s;
"""
f.write(code % {'type': self.type, 'name': self.name})
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
class ResourceIdZeroArgument(Argument):
"""Represents a resource id argument to a function that can be zero."""
def __init__(self, name, arg_type, arg_default):
match = re.match("(GLidZero\w+)", arg_type)
self.resource_type = match.group(1)[8:]
arg_type = arg_type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, arg_type, arg_default)
def WriteGetCode(self, f):
"""Overridden from Argument."""
f.write(" %s %s = %s;\n" % (self.type, self.name,
self.GetArgAccessor('c')))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
def GetNumInvalidValues(self, _func):
"""returns the number of invalid values to be tested."""
return 1
def GetInvalidArg(self, _index):
"""returns an invalid value by index."""
return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
class Int64Argument(Argument):
"""Represents a GLuint64 argument which splits up into 2 uint32_t items."""
def __init__(self, name, arg_type, arg_default):
Argument.__init__(self, name, arg_type, arg_default)
def GetArgAccessor(self, cmd_struct_name):
return "%s.%s()" % (cmd_struct_name, self.name)
def WriteArgAccessor(self, f):
"""Writes specialized accessor for compound members."""
f.write(" %s %s() const volatile {\n" % (self.type, self.name))
f.write(" return static_cast<%s>(\n" % self.type)
f.write(" %sGLES2Util::MapTwoUint32ToUint64(\n" % _Namespace())
f.write(" %s_0,\n" % self.name)
f.write(" %s_1));\n" % self.name)
f.write(" }\n")
f.write("\n")
def WriteGetCode(self, f):
"""Writes the code to get an argument from a command structure."""
f.write(" %s %s = c.%s();\n" % (self.type, self.name, self.name))
def WriteSetCode(self, f, indent, var):
indent_str = ' ' * indent
f.write("%s%sGLES2Util::MapUint64ToTwoUint32(static_cast<uint64_t>(%s),\n" %
(indent_str, _Namespace(), var))
f.write("%s &%s_0,\n" %
(indent_str, self.name))
f.write("%s &%s_1);\n" %
(indent_str, self.name))
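  # The emitted C++ round-trips the 64-bit value through two uint32_t fields.
  # For a hypothetical argument named 'timeout' the accessor generated by
  # WriteArgAccessor looks roughly like (namespace prefix elided; it comes
  # from _Namespace()):
  #
  #   GLuint64 timeout() const volatile {
  #     return static_cast<GLuint64>(
  #         GLES2Util::MapTwoUint32ToUint64(timeout_0, timeout_1));
  #   }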
class Function(object):
"""A class that represents a function."""
def __init__(self, name, info, named_type_info, type_handlers):
self.name = name
self.named_type_info = named_type_info
self.prefixed_name = info['prefixed_name']
self.original_name = info['original_name']
self.original_args = self.ParseArgs(info['original_args'])
if 'cmd_args' in info:
self.args_for_cmds = self.ParseArgs(info['cmd_args'])
else:
self.args_for_cmds = self.original_args[:]
self.passthrough_service_doer_args = self.original_args[:]
if 'size_args' in info:
self.size_args = info['size_args']
else:
self.size_args = {}
self.return_type = info['return_type']
if self.return_type != 'void':
self.return_arg = CreateArg(info['return_type'] + " result",
named_type_info)
else:
self.return_arg = None
self.num_pointer_args = sum(
[1 for arg in self.args_for_cmds if arg.IsPointer()])
if self.num_pointer_args > 0:
for arg in reversed(self.original_args):
if arg.IsPointer():
self.last_original_pointer_arg = arg
break
else:
self.last_original_pointer_arg = None
self.info = info
self.type_handler = type_handlers[info['type']]
self.can_auto_generate = (self.num_pointer_args == 0 and
info['return_type'] == "void")
# Satisfy pylint warning attribute-defined-outside-init.
#
# self.cmd_args is typically set in InitFunction, but that method may be
    # overridden.
self.cmd_args = []
self.InitFunction()
def ParseArgs(self, arg_string):
"""Parses a function arg string."""
args = []
parts = arg_string.split(',')
for arg_string in parts:
arg = CreateArg(arg_string, self.named_type_info)
if arg:
args.append(arg)
return args
def IsType(self, type_name):
"""Returns true if function is a certain type."""
return self.info['type'] == type_name
def InitFunction(self):
"""Creates command args and calls the init function for the type handler.
    Creates argument lists for command buffer commands, e.g. self.cmd_args and
    self.init_args.
    Calls the type handler's initialization.
    Override to create different kinds of command buffer command argument
    lists.
"""
self.cmd_args = []
for arg in self.args_for_cmds:
arg.AddCmdArgs(self.cmd_args)
self.init_args = []
for arg in self.args_for_cmds:
arg.AddInitArgs(self.init_args)
if self.return_arg:
self.init_args.append(self.return_arg)
self.type_handler.InitFunction(self)
def IsImmediate(self):
"""Returns whether the function is immediate data function or not."""
return False
def IsES3(self):
"""Returns whether the function requires an ES3 context or not."""
return self.GetInfo('es3', False)
def IsES31(self):
"""Returns whether the function requires an ES31 context or not."""
return self.GetInfo('es31', False)
def GetInfo(self, name, default = None):
"""Returns a value from the function info for this function."""
if name in self.info:
return self.info[name]
return default
def GetValidArg(self, arg):
"""Gets a valid argument value for the parameter arg from the function info
if one exists."""
try:
index = self.GetOriginalArgs().index(arg)
except ValueError:
return None
valid_args = self.GetInfo('valid_args')
if valid_args and str(index) in valid_args:
return valid_args[str(index)]
return None
def AddInfo(self, name, value):
"""Adds an info."""
self.info[name] = value
def IsExtension(self):
return self.GetInfo('extension') or self.GetInfo('extension_flag')
def IsCoreGLFunction(self):
return (not self.IsExtension() and
not self.GetInfo('pepper_interface') and
not self.IsES3() and
not self.IsES31())
def InPepperInterface(self, interface):
ext = self.GetInfo('pepper_interface')
if not interface.GetName():
return self.IsCoreGLFunction()
return ext == interface.GetName()
def InAnyPepperExtension(self):
return self.IsCoreGLFunction() or self.GetInfo('pepper_interface')
def GetErrorReturnString(self):
if self.GetInfo("error_return"):
return self.GetInfo("error_return")
elif self.return_type == "GLboolean":
return "GL_FALSE"
elif "*" in self.return_type:
return "nullptr"
return "0"
def GetGLFunctionName(self):
"""Gets the function to call to execute GL for this command."""
if self.GetInfo('decoder_func'):
return self.GetInfo('decoder_func')
return "api()->gl%sFn" % self.original_name
def GetGLTestFunctionName(self):
gl_func_name = self.GetInfo('gl_test_func')
    if gl_func_name is None:
gl_func_name = self.GetGLFunctionName()
if gl_func_name.startswith("gl"):
gl_func_name = gl_func_name[2:]
else:
gl_func_name = self.original_name
return gl_func_name
def GetDataTransferMethods(self):
return self.GetInfo('data_transfer_methods',
['immediate' if self.num_pointer_args == 1 else 'shm'])
def AddCmdArg(self, arg):
"""Adds a cmd argument to this function."""
self.cmd_args.append(arg)
def GetCmdArgs(self):
"""Gets the command args for this function."""
return self.cmd_args
def ClearCmdArgs(self):
"""Clears the command args for this function."""
self.cmd_args = []
def GetCmdConstants(self):
"""Gets the constants for this function."""
return [arg for arg in self.args_for_cmds if arg.IsConstant()]
def GetInitArgs(self):
"""Gets the init args for this function."""
return self.init_args
def GetOriginalArgs(self):
"""Gets the original arguments to this function."""
return self.original_args
def GetPassthroughServiceDoerArgs(self):
"""Gets the original arguments to this function."""
return self.passthrough_service_doer_args
def GetLastOriginalArg(self):
"""Gets the last original argument to this function."""
    return self.original_args[-1]
def GetLastOriginalPointerArg(self):
return self.last_original_pointer_arg
def GetResourceIdArg(self):
for arg in self.original_args:
if hasattr(arg, 'resource_type'):
return arg
return None
def _MaybePrependComma(self, arg_string, add_comma):
"""Adds a comma if arg_string is not empty and add_comma is true."""
comma = ""
if add_comma and len(arg_string):
comma = ", "
return "%s%s" % (comma, arg_string)
def MakeTypedOriginalArgString(self, prefix, add_comma = False,
add_default = False):
"""Gets a list of arguments as they are in GL."""
args = self.GetOriginalArgs()
def ArgToString(arg):
tmp = [arg.type, prefix + arg.name]
if add_default and arg.default:
tmp.append("=")
tmp.append(arg.default)
return " ".join(tmp)
arg_string = ", ".join([ArgToString(arg) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
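  # Example, for a function such as glBindTexture(GLenum target,
  # GLuint texture):
  #
  #   func.MakeTypedOriginalArgString("_", add_comma=True)
  #   # -> ", GLenum _target, GLuint _texture"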
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets the list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakePassthroughServiceDoerArgString(self, prefix, add_comma = False,
separator = ", "):
"""Gets the list of arguments as they are in used by the passthrough
service doer function."""
args = self.GetPassthroughServiceDoerArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeHelperArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets a list of GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name)
for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedPepperArgString(self, prefix):
"""Gets a list of arguments as they need to be for Pepper."""
if self.GetInfo("pepper_args"):
return self.GetInfo("pepper_args")
else:
return self.MakeTypedOriginalArgString(prefix, False)
def MapCTypeToPepperIdlType(self, ctype, is_for_return_type=False):
"""Converts a C type name to the corresponding Pepper IDL type."""
idltype = {
'char*': '[out] str_t',
'const GLchar* const*': '[out] cstr_t',
'const char*': 'cstr_t',
'const void*': 'mem_t',
'void*': '[out] mem_t',
'void**': '[out] mem_ptr_t',
}.get(ctype, ctype)
# We use "GLxxx_ptr_t" for "GLxxx*".
matched = re.match(r'(const )?(GL\w+)\*$', ctype)
if matched:
idltype = matched.group(2) + '_ptr_t'
if not matched.group(1):
idltype = '[out] ' + idltype
# If an in/out specifier is not specified yet, prepend [in].
if idltype[0] != '[':
idltype = '[in] ' + idltype
# Strip the in/out specifier for a return type.
if is_for_return_type:
idltype = re.sub(r'\[\w+\] ', '', idltype)
return idltype
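  # A few mappings implied by the rules above (illustrative):
  #
  #   MapCTypeToPepperIdlType('const GLint*')  # -> '[in] GLint_ptr_t'
  #   MapCTypeToPepperIdlType('GLint*')        # -> '[out] GLint_ptr_t'
  #   MapCTypeToPepperIdlType('GLenum')        # -> '[in] GLenum'
  #   MapCTypeToPepperIdlType('GLenum', is_for_return_type=True)  # -> 'GLenum'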
def MakeTypedPepperIdlArgStrings(self):
"""Gets a list of arguments as they need to be for Pepper IDL."""
args = self.GetOriginalArgs()
return ["%s %s" % (self.MapCTypeToPepperIdlType(arg.type), arg.name)
for arg in args]
def GetPepperName(self):
if self.GetInfo("pepper_name"):
return self.GetInfo("pepper_name")
return self.name
def MakeTypedCmdArgString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeCmdArgString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedInitString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeInitString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeLogArgString(self):
"""Makes a string of the arguments for the LOG macros"""
args = self.GetOriginalArgs()
return ' << ", " << '.join([arg.GetLogArg() for arg in args])
def WriteHandlerValidation(self, f):
"""Writes validation code for the function."""
for arg in self.GetOriginalArgs():
arg.WriteValidationCode(f, self)
self.WriteValidationCode(f)
def WriteQueueTraceEvent(self, f):
if self.GetInfo("trace_queueing_flow", False):
trace = 'TRACE_DISABLED_BY_DEFAULT("gpu_cmd_queue")'
f.write("""if (c.trace_id) {
TRACE_EVENT_WITH_FLOW0(%s, "CommandBufferQueue",
c.trace_id, TRACE_EVENT_FLAG_FLOW_IN);\n}""" % trace)
def WritePassthroughHandlerValidation(self, f):
"""Writes validation code for the function."""
for arg in self.GetOriginalArgs():
arg.WritePassthroughValidationCode(f, self)
def WriteHandlerImplementation(self, f):
"""Writes the handler implementation for this command."""
self.type_handler.WriteHandlerImplementation(self, f)
def WriteValidationCode(self, f):
"""Writes the validation code for a command."""
pass
def WriteCmdFlag(self, f):
"""Writes the cmd cmd_flags constant."""
# By default trace only at the highest level 3.
trace_level = int(self.GetInfo('trace_level', default = 3))
    if trace_level not in range(0, 4):
raise KeyError("Unhandled trace_level: %d" % trace_level)
cmd_flags = ('CMD_FLAG_SET_TRACE_LEVEL(%d)' % trace_level)
f.write(" static const uint8_t cmd_flags = %s;\n" % cmd_flags)
def WriteCmdArgFlag(self, f):
"""Writes the cmd kArgFlags constant."""
f.write(" static const cmd::ArgFlags kArgFlags = cmd::kFixed;\n")
def WriteCmdComputeSize(self, f):
"""Writes the ComputeSize function for the command."""
f.write(" static uint32_t ComputeSize() {\n")
f.write(
" return static_cast<uint32_t>(sizeof(ValueType)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteCmdSetHeader(self, f):
"""Writes the cmd's SetHeader function."""
f.write(" void SetHeader() {\n")
f.write(" header.SetCmd<ValueType>();\n")
f.write(" }\n")
f.write("\n")
def WriteCmdInit(self, f):
"""Writes the cmd's Init function."""
f.write(" void Init(%s) {\n" % self.MakeTypedCmdArgString("_"))
f.write(" SetHeader();\n")
args = self.GetCmdArgs()
for arg in args:
arg.WriteSetCode(f, 4, '_%s' % arg.name)
if self.GetInfo("trace_queueing_flow", False):
trace = 'TRACE_DISABLED_BY_DEFAULT("gpu_cmd_queue")'
f.write('bool is_tracing = false;')
f.write('TRACE_EVENT_CATEGORY_GROUP_ENABLED(%s, &is_tracing);' % trace)
f.write('if (is_tracing) {')
f.write(' trace_id = base::RandUint64();')
f.write('TRACE_EVENT_WITH_FLOW1(%s, "CommandBufferQueue",' % trace)
f.write('trace_id, TRACE_EVENT_FLAG_FLOW_OUT,')
f.write('"command", "%s");' % self.name)
      f.write('} else {\n trace_id = 0;\n}\n')
f.write("}\n")
f.write("\n")
def WriteCmdSet(self, f):
"""Writes the cmd's Set function."""
copy_args = self.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s) {\n" %
self.MakeTypedCmdArgString("_", True))
f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
f.write(" return NextCmdAddress<ValueType>(cmd);\n")
f.write(" }\n")
f.write("\n")
def WriteArgAccessors(self, f):
"""Writes the cmd's accessor functions."""
for arg in self.GetCmdArgs():
arg.WriteArgAccessor(f)
def WriteStruct(self, f):
self.type_handler.WriteStruct(self, f)
def WriteDocs(self, f):
self.type_handler.WriteDocs(self, f)
def WriteCmdHelper(self, f):
"""Writes the cmd's helper."""
self.type_handler.WriteCmdHelper(self, f)
def WriteServiceImplementation(self, f):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceImplementation(self, f)
def WritePassthroughServiceImplementation(self, f):
"""Writes the service implementation for a command."""
self.type_handler.WritePassthroughServiceImplementation(self, f)
def WriteServiceUnitTest(self, f, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceUnitTest(self, f, *extras)
def WriteGLES2CLibImplementation(self, f):
"""Writes the GLES2 C Lib Implemention."""
self.type_handler.WriteGLES2CLibImplementation(self, f)
def WriteGLES2InterfaceHeader(self, f):
"""Writes the GLES2 Interface declaration."""
self.type_handler.WriteGLES2InterfaceHeader(self, f)
def WriteGLES2InterfaceStub(self, f):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStub(self, f)
def WriteGLES2InterfaceStubImpl(self, f):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStubImpl(self, f)
def WriteGLES2ImplementationHeader(self, f):
"""Writes the GLES2 Implemention declaration."""
self.type_handler.WriteGLES2ImplementationHeader(self, f)
def WriteGLES2Implementation(self, f):
"""Writes the GLES2 Implemention definition."""
self.type_handler.WriteGLES2Implementation(self, f)
def WriteGLES2TraceImplementationHeader(self, f):
"""Writes the GLES2 Trace Implemention declaration."""
self.type_handler.WriteGLES2TraceImplementationHeader(self, f)
def WriteGLES2TraceImplementation(self, f):
"""Writes the GLES2 Trace Implemention definition."""
self.type_handler.WriteGLES2TraceImplementation(self, f)
def WriteGLES2Header(self, f):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2Header(self, f)
def WriteGLES2ImplementationUnitTest(self, f):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2ImplementationUnitTest(self, f)
def WriteDestinationInitalizationValidation(self, f):
"""Writes the client side destintion initialization validation."""
self.type_handler.WriteDestinationInitalizationValidation(self, f)
def WriteFormatTest(self, f):
"""Writes the cmd's format test."""
self.type_handler.WriteFormatTest(self, f)
class PepperInterface(object):
"""A class that represents a function."""
def __init__(self, info):
self.name = info["name"]
self.dev = info["dev"]
def GetName(self):
return self.name
def GetInterfaceName(self):
upperint = ""
dev = ""
if self.name:
upperint = "_" + self.name.upper()
if self.dev:
dev = "_DEV"
return "PPB_OPENGLES2%s%s_INTERFACE" % (upperint, dev)
def GetStructName(self):
dev = ""
if self.dev:
dev = "_Dev"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
class ImmediateFunction(Function):
"""A class that represents an immediate function command."""
def __init__(self, func, type_handlers):
Function.__init__(
self,
"%sImmediate" % func.name,
func.info,
func.named_type_info,
type_handlers)
def InitFunction(self):
# Override args in original_args and args_for_cmds with immediate versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def IsImmediate(self):
return True
def WriteServiceImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateServiceImplementation(self, f)
def WritePassthroughServiceImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WritePassthroughImmediateServiceImplementation(self, f)
def WriteHandlerImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateHandlerImplementation(self, f)
def WriteServiceUnitTest(self, f, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteImmediateServiceUnitTest(self, f, *extras)
def WriteValidationCode(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateValidationCode(self, f)
def WriteCmdArgFlag(self, f):
"""Overridden from Function"""
f.write(" static const cmd::ArgFlags kArgFlags = cmd::kAtLeastN;\n")
def WriteCmdComputeSize(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdComputeSize(self, f)
def WriteCmdSetHeader(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSetHeader(self, f)
def WriteCmdInit(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdInit(self, f)
def WriteCmdSet(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSet(self, f)
def WriteCmdHelper(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdHelper(self, f)
def WriteFormatTest(self, f):
"""Overridden from Function"""
self.type_handler.WriteImmediateFormatTest(self, f)
class BucketFunction(Function):
"""A class that represnets a bucket version of a function command."""
def __init__(self, func, type_handlers):
Function.__init__(
self,
"%sBucket" % func.name,
func.info,
func.named_type_info,
type_handlers)
def InitFunction(self):
# Override args in original_args and args_for_cmds with bucket versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetBucketVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetBucketVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def WriteServiceImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceImplementation(self, f)
def WritePassthroughServiceImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WritePassthroughBucketServiceImplementation(self, f)
def WriteHandlerImplementation(self, f):
"""Overridden from Function"""
self.type_handler.WriteBucketHandlerImplementation(self, f)
def WriteServiceUnitTest(self, f, *extras):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceUnitTest(self, f, *extras)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Overridden from Function"""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name[0:-10] if arg.name.endswith("_bucket_id")
else arg.name) for arg in args])
return super(BucketFunction, self)._MaybePrependComma(arg_string, add_comma)
def CreateArg(arg_string, named_type_info):
"""Convert string argument to an Argument class that represents it.
The parameter 'arg_string' can be a single argument to a GL function,
something like 'GLsizei width' or 'const GLenum* bufs'. Returns an instance of
the Argument class, or None if 'arg_string' is 'void'.
"""
if arg_string == 'void':
return None
arg_string = arg_string.strip()
arg_default = None
if '=' in arg_string:
arg_string, arg_default = arg_string.split('=')
arg_default = arg_default.strip()
arg_parts = arg_string.split()
assert len(arg_parts) > 1
arg_name = arg_parts[-1]
arg_type = " ".join(arg_parts[0:-1])
t = arg_parts[0] # only the first part of arg_type
# Is this a pointer argument?
if arg_string.find('*') >= 0:
return PointerArgument(arg_name, arg_type, arg_default)
elif t.startswith('EnumClass'):
return EnumClassArgument(arg_name, arg_type, named_type_info, arg_default)
# Is this a resource argument? Must come after pointer check.
elif t.startswith('GLidBind'):
return ResourceIdBindArgument(arg_name, arg_type, arg_default)
elif t.startswith('GLidZero'):
return ResourceIdZeroArgument(arg_name, arg_type, arg_default)
elif t.startswith('GLid'):
return ResourceIdArgument(arg_name, arg_type, arg_default)
elif t.startswith('GLenum') and t != 'GLenum':
return EnumArgument(arg_name, arg_type, named_type_info, arg_default)
elif t.startswith('GLbitfield') and t != 'GLbitfield':
return BitFieldArgument(arg_name, arg_type, named_type_info, arg_default)
elif t.startswith('GLboolean'):
return GLBooleanArgument(arg_name, arg_type, arg_default)
elif t.startswith('GLintUniformLocation'):
return UniformLocationArgument(arg_name, arg_default)
elif (t.startswith('GLint') and t != 'GLint' and
not t.startswith('GLintptr')):
return IntArgument(arg_name, arg_type, named_type_info, arg_default)
elif t == 'bool':
return BoolArgument(arg_name, arg_type, arg_default)
elif t == 'GLsizeiNotNegative' or t == 'GLintptrNotNegative':
return SizeNotNegativeArgument(arg_name, t.replace('NotNegative', ''),
arg_default)
elif t.startswith('GLsize'):
return SizeArgument(arg_name, arg_type, arg_default)
elif t == 'GLuint64' or t == 'GLint64':
return Int64Argument(arg_name, arg_type, arg_default)
else:
return Argument(arg_name, arg_type, arg_default)
class GLGenerator(object):
"""A class to generate GL command buffers."""
_whitespace_re = re.compile(r'^\w*$')
_comment_re = re.compile(r'^//.*$')
_function_re = re.compile(r'^GL_APICALL(.*?)GL_APIENTRY (.*?) \((.*?)\);$')
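# Example of a line matched by _function_re (sketch):
#   GL_APICALL void GL_APIENTRY glBindBuffer (GLenum target, GLuint buffer);
# group(1) is ' void ' (the return type), group(2) is 'glBindBuffer' (the
# prefixed name; the 'gl' prefix is stripped in ParseGLH), and group(3) is
# 'GLenum target, GLuint buffer'.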
def __init__(self, verbose, year, function_info, named_type_info,
chromium_root_dir):
self.original_functions = []
self.functions = []
self.chromium_root_dir = chromium_root_dir
self.verbose = verbose
self.year = year
self.errors = 0
self.pepper_interfaces = []
self.interface_info = {}
self.generated_cpp_filenames = []
self.function_info = function_info
self.named_type_info = named_type_info
self.capability_flags = _CAPABILITY_FLAGS
self.type_handlers = {
'': TypeHandler(),
'Bind': BindHandler(),
'Create': CreateHandler(),
'Custom': CustomHandler(),
'Data': DataHandler(),
'Delete': DeleteHandler(),
'DELn': DELnHandler(),
'GENn': GENnHandler(),
'GETn': GETnHandler(),
'GLchar': GLcharHandler(),
'GLcharN': GLcharNHandler(),
'Is': IsHandler(),
'NoCommand': NoCommandHandler(),
'PUT': PUTHandler(),
'PUTn': PUTnHandler(),
'PUTSTR': PUTSTRHandler(),
'PUTXn': PUTXnHandler(),
'StateSet': StateSetHandler(),
'StateSetRGBAlpha': StateSetRGBAlphaHandler(),
'StateSetFrontBack': StateSetFrontBackHandler(),
'StateSetFrontBackSeparate':
StateSetFrontBackSeparateHandler(),
'StateSetNamedParameter': StateSetNamedParameter(),
'STRn': STRnHandler(),
}
for interface in _PEPPER_INTERFACES:
interface = PepperInterface(interface)
self.pepper_interfaces.append(interface)
self.interface_info[interface.GetName()] = interface
def AddFunction(self, func):
"""Adds a function."""
self.functions.append(func)
def GetFunctionInfo(self, name):
"""Gets a type info for the given function name."""
if name in self.function_info:
func_info = self.function_info[name].copy()
else:
func_info = {}
if 'type' not in func_info:
func_info['type'] = ''
return func_info
def Log(self, msg):
"""Prints something if verbose is true."""
if self.verbose:
print msg
def Error(self, msg):
"""Prints an error."""
print "Error: %s" % msg
self.errors += 1
def ParseGLH(self, filename):
"""Parses the cmd_buffer_functions.txt file and extracts the functions"""
filename = os.path.join(self.chromium_root_dir, filename)
with open(filename, "r") as f:
functions = f.read()
for line in functions.splitlines():
if self._whitespace_re.match(line) or self._comment_re.match(line):
continue
match = self._function_re.match(line)
if match:
prefixed_name = match.group(2)
func_name = prefixed_name[2:]
func_info = self.GetFunctionInfo(func_name)
if func_info['type'] == 'Noop':
continue
parsed_func_info = {
'prefixed_name': prefixed_name,
'original_name': func_name,
'original_args': match.group(3),
'return_type': match.group(1).strip(),
}
for k in parsed_func_info.keys():
if k not in func_info:
func_info[k] = parsed_func_info[k]
f = Function(func_name, func_info, self.named_type_info,
self.type_handlers)
if not f.GetInfo('internal'):
self.original_functions.append(f)
#for arg in f.GetOriginalArgs():
# if not isinstance(arg, EnumArgument) and arg.type == 'GLenum':
# self.Log("%s uses bare GLenum %s." % (func_name, arg.name))
func_type = f.GetInfo('type')
if func_type != 'NoCommand':
if f.type_handler.NeedsDataTransferFunction(f):
methods = f.GetDataTransferMethods()
if 'immediate' in methods:
self.AddFunction(ImmediateFunction(f, self.type_handlers))
if 'bucket' in methods:
self.AddFunction(BucketFunction(f, self.type_handlers))
if 'shm' in methods:
self.AddFunction(f)
else:
self.AddFunction(f)
else:
self.Error("Could not parse function: %s using regex: %s" %
(line, self._function_re.pattern))
self.Log("Auto Generated Functions : %d" %
len([f for f in self.functions if f.can_auto_generate or
(not f.IsType('') and not f.IsType('Custom') and
not f.IsType('Todo'))]))
funcs = [f for f in self.functions if not f.can_auto_generate and
(f.IsType('') or f.IsType('Custom') or f.IsType('Todo'))]
self.Log("Non Auto Generated Functions: %d" % len(funcs))
for f in funcs:
self.Log(" %-10s %-20s gl%s" % (f.info['type'], f.return_type, f.name))
def WriteCommandIds(self, filename):
"""Writes the command buffer format"""
with CHeaderWriter(filename, self.year) as f:
f.write("#define %s_COMMAND_LIST(OP) \\\n" % _upper_prefix)
cmd_id = 256
for func in self.functions:
f.write(" %-60s /* %d */ \\\n" %
("OP(%s)" % func.name, cmd_id))
cmd_id += 1
f.write("\n")
f.write("enum CommandId {\n")
f.write(" kOneBeforeStartPoint = cmd::kLastCommonId, "
"// All %s commands start after this.\n" % _prefix)
f.write("#define %s_CMD_OP(name) k ## name,\n" % _upper_prefix)
f.write(" %s_COMMAND_LIST(%s_CMD_OP)\n" %
(_upper_prefix, _upper_prefix))
f.write("#undef %s_CMD_OP\n" % _upper_prefix)
f.write(" kNumCommands,\n")
f.write(" kFirst%sCommand = kOneBeforeStartPoint + 1\n" % _prefix)
f.write("};\n")
f.write("\n")
self.generated_cpp_filenames.append(filename)
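# Sketch of the generated header (assuming a GLES2 prefix; names and ids are
# illustrative):
#   #define GLES2_COMMAND_LIST(OP) \
#     OP(ActiveTexture)  /* 256 */ \
#     OP(AttachShader)   /* 257 */ \
#     ...
#   enum CommandId { ..., kNumCommands,
#                    kFirstGLES2Command = kOneBeforeStartPoint + 1 };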
def WriteFormat(self, filename):
"""Writes the command buffer format"""
with CHeaderWriter(filename, self.year) as f:
# Forward declaration of a few enums used in constant argument
# to avoid including GL header files.
enum_defines = {'GL_SCANOUT_CHROMIUM': '0x6000'}
if 'FenceSync' in self.function_info:
enum_defines['GL_SYNC_GPU_COMMANDS_COMPLETE'] = '0x9117'
if 'ClientWaitSync' in self.function_info:
enum_defines['GL_SYNC_FLUSH_COMMANDS_BIT'] = '0x00000001'
f.write('\n')
for enum in enum_defines:
f.write("#define %s %s\n" % (enum, enum_defines[enum]))
f.write('\n')
for func in self.functions:
func.WriteStruct(f)
f.write("\n")
self.generated_cpp_filenames.append(filename)
def WriteDocs(self, filename):
"""Writes the command buffer doc version of the commands"""
with CHeaderWriter(filename, self.year) as f:
for func in self.functions:
func.WriteDocs(f)
f.write("\n")
self.generated_cpp_filenames.append(filename)
def WriteFormatTest(self, filename):
"""Writes the command buffer format test."""
comment = ("// This file contains unit tests for %s commands\n"
"// It is included by %s_cmd_format_test.cc\n\n" %
(_lower_prefix, _lower_prefix))
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.functions:
func.WriteFormatTest(f)
self.generated_cpp_filenames.append(filename)
def WriteCmdHelperHeader(self, filename):
"""Writes the gles2 command helper."""
with CHeaderWriter(filename, self.year) as f:
for func in self.functions:
func.WriteCmdHelper(f)
self.generated_cpp_filenames.append(filename)
def WriteServiceContextStateHeader(self, filename):
"""Writes the service context state header."""
comment = "// It is included by context_state.h\n"
with CHeaderWriter(filename, self.year, comment) as f:
f.write("struct EnableFlags {\n")
f.write(" EnableFlags();\n")
for capability in self.capability_flags:
f.write(" bool %s;\n" % capability['name'])
f.write(" bool cached_%s;\n" % capability['name'])
f.write("};\n\n")
for state_name in sorted(_STATE_INFO.keys()):
state = _STATE_INFO[state_name]
for item in state['states']:
if isinstance(item['default'], list):
f.write("%s %s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
f.write("%s %s;\n" % (item['type'], item['name']))
if item.get('cached', False):
if isinstance(item['default'], list):
f.write("%s cached_%s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
f.write("%s cached_%s;\n" % (item['type'], item['name']))
f.write("\n")
f.write("""
inline void SetDeviceCapabilityState(GLenum cap, bool enable) {
switch (cap) {
""")
for capability in self.capability_flags:
f.write("""\
case GL_%s:
""" % capability['name'].upper())
f.write("""\
if (enable_flags.cached_%(name)s == enable &&
!ignore_cached_state)
return;
enable_flags.cached_%(name)s = enable;
break;
""" % capability)
f.write("""\
default:
NOTREACHED();
return;
}
if (enable)
api()->glEnableFn(cap);
else
api()->glDisableFn(cap);
}
""")
self.generated_cpp_filenames.append(filename)
def WriteClientContextStateHeader(self, filename):
"""Writes the client context state header."""
comment = "// It is included by client_context_state.h\n"
with CHeaderWriter(filename, self.year, comment) as f:
f.write("struct EnableFlags {\n")
f.write(" EnableFlags();\n")
for capability in self.capability_flags:
if 'extension_flag' in capability:
continue
f.write(" bool %s;\n" % capability['name'])
f.write("};\n\n")
self.generated_cpp_filenames.append(filename)
def WriteContextStateGetters(self, f, class_name):
"""Writes the state getters."""
for gl_type in ["GLint", "GLfloat"]:
f.write("""
bool %s::GetStateAs%s(
GLenum pname, %s* params, GLsizei* num_written) const {
switch (pname) {
""" % (class_name, gl_type, gl_type))
for state_name in sorted(_STATE_INFO.keys()):
state = _STATE_INFO[state_name]
if 'enum' in state:
f.write(" case %s:\n" % state['enum'])
f.write(" *num_written = %d;\n" % len(state['states']))
f.write(" if (params) {\n")
for ndx,item in enumerate(state['states']):
f.write(" params[%d] = static_cast<%s>(%s);\n" %
(ndx, gl_type, item['name']))
f.write(" }\n")
f.write(" return true;\n")
else:
for item in state['states']:
f.write(" case %s:\n" % item['enum'])
if isinstance(item['default'], list):
item_len = len(item['default'])
f.write(" *num_written = %d;\n" % item_len)
f.write(" if (params) {\n")
if item['type'] == gl_type:
f.write(" memcpy(params, %s, sizeof(%s) * %d);\n" %
(item['name'], item['type'], item_len))
else:
f.write(" for (size_t i = 0; i < %s; ++i) {\n" %
item_len)
f.write(" params[i] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
"%s[i]" % item['name'])))
f.write(" }\n");
else:
f.write(" *num_written = 1;\n")
f.write(" if (params) {\n")
f.write(" params[0] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
item['name'])))
f.write(" }\n")
f.write(" return true;\n")
for capability in self.capability_flags:
f.write(" case GL_%s:\n" % capability['name'].upper())
f.write(" *num_written = 1;\n")
f.write(" if (params) {\n")
f.write(
" params[0] = static_cast<%s>(enable_flags.%s);\n" %
(gl_type, capability['name']))
f.write(" }\n")
f.write(" return true;\n")
f.write(""" default:
return false;
}
}
""")
def WriteServiceContextStateImpl(self, filename):
"""Writes the context state service implementation."""
comment = "// It is included by context_state.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
code = []
for capability in self.capability_flags:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
code.append("cached_%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
f.write("ContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
f.write("\n")
f.write("void ContextState::Initialize() {\n")
for state_name in sorted(_STATE_INFO.keys()):
state = _STATE_INFO[state_name]
for item in state['states']:
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
f.write(" %s[%d] = %s;\n" % (item['name'], ndx, value))
else:
f.write(" %s = %s;\n" % (item['name'], item['default']))
if item.get('cached', False):
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
f.write(" cached_%s[%d] = %s;\n" % (item['name'], ndx, value))
else:
f.write(" cached_%s = %s;\n" % (item['name'], item['default']))
f.write("}\n")
f.write("""
void ContextState::InitCapabilities(const ContextState* prev_state) const {
""")
def WriteCapabilities(test_prev, es3_caps):
for capability in self.capability_flags:
capability_name = capability['name']
capability_no_init = 'no_init' in capability and \
capability['no_init'] == True
if capability_no_init:
continue
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3 != es3_caps:
continue
if 'extension_flag' in capability:
f.write(" if (feature_info_->feature_flags().%s) {\n " %
capability['extension_flag'])
if test_prev:
f.write(""" if (prev_state->enable_flags.cached_%s !=
enable_flags.cached_%s) {\n""" %
(capability_name, capability_name))
f.write(" EnableDisable(GL_%s, enable_flags.cached_%s);\n" %
(capability_name.upper(), capability_name))
if test_prev:
f.write(" }")
if 'extension_flag' in capability:
f.write(" }")
f.write(" if (prev_state) {")
WriteCapabilities(True, False)
f.write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(True, True)
f.write(" }\n")
f.write(" } else {")
WriteCapabilities(False, False)
f.write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(False, True)
f.write(" }\n")
f.write(" }")
f.write("""}
void ContextState::InitState(const ContextState *prev_state) const {
""")
def WriteStates(test_prev):
# We need to sort the keys so the expectations match
for state_name in sorted(_STATE_INFO.keys()):
state = _STATE_INFO[state_name]
if 'no_init' in state and state['no_init']:
continue
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2,
state['states'])):
if test_prev:
f.write(" if (")
args = []
for place, item in enumerate(group):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
f.write(' ||\n')
f.write("(%s != prev_state->%s)" % (item_name, item_name))
if test_prev:
f.write(")\n")
f.write(
" api()->gl%sFn(%s, %s);\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx],
", ".join(args)))
elif state['type'] == 'NamedParameter':
for item in state['states']:
item_name = CachedStateName(item)
operation = []
if test_prev:
if isinstance(item['default'], list):
operation.append(" if (memcmp(prev_state->%s, %s, "
"sizeof(%s) * %d)) {\n" %
(item_name, item_name, item['type'],
len(item['default'])))
else:
operation.append(" if (prev_state->%s != %s) {\n " %
(item_name, item_name))
operation.append(" api()->gl%sFn(%s, %s);\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
item['name']))
if test_prev:
operation.append(" }")
guarded_operation = GuardState(item, ''.join(operation),
"feature_info_")
f.write(guarded_operation)
else:
if 'extension_flag' in state:
f.write(" if (feature_info_->feature_flags().%s)\n " %
state['extension_flag'])
if test_prev:
f.write(" if (")
args = []
for place, item in enumerate(state['states']):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
f.write(' ||\n')
f.write("(%s != prev_state->%s)" %
(item_name, item_name))
if test_prev:
f.write(" )\n")
if 'custom_function' in state:
f.write(" %s(%s);\n" % (state['func'], ", ".join(args)))
else:
f.write(" api()->gl%sFn(%s);\n" % (state['func'],
", ".join(args)))
f.write(" if (prev_state) {")
WriteStates(True)
f.write(" } else {")
WriteStates(False)
f.write(" }")
f.write(" InitStateManual(prev_state);")
f.write("}\n")
f.write("""bool ContextState::GetEnabled(GLenum cap) const {
switch (cap) {
""")
for capability in self.capability_flags:
f.write(" case GL_%s:\n" % capability['name'].upper())
f.write(" return enable_flags.%s;\n" % capability['name'])
f.write(""" default:
NOTREACHED();
return false;
}
}
""")
self.WriteContextStateGetters(f, "ContextState")
self.generated_cpp_filenames.append(filename)
def WriteClientContextStateImpl(self, filename):
"""Writes the context state client side implementation."""
comment = "// It is included by client_context_state.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
code = []
for capability in self.capability_flags:
if 'extension_flag' in capability:
continue
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
f.write(
"ClientContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
f.write("\n")
f.write("""
bool ClientContextState::SetCapabilityState(
GLenum cap, bool enabled, bool* changed) {
*changed = false;
switch (cap) {
""")
for capability in self.capability_flags:
if 'extension_flag' in capability:
continue
f.write(" case GL_%s:\n" % capability['name'].upper())
f.write(""" if (enable_flags.%(name)s != enabled) {
*changed = true;
enable_flags.%(name)s = enabled;
}
return true;
""" % capability)
f.write(""" default:
return false;
}
}
""")
f.write("""bool ClientContextState::GetEnabled(
GLenum cap, bool* enabled) const {
switch (cap) {
""")
for capability in self.capability_flags:
if 'extension_flag' in capability:
continue
f.write(" case GL_%s:\n" % capability['name'].upper())
f.write(" *enabled = enable_flags.%s;\n" % capability['name'])
f.write(" return true;\n")
f.write(""" default:
return false;
}
}
""")
self.generated_cpp_filenames.append(filename)
def WriteServiceImplementation(self, filename):
"""Writes the service decoder implementation."""
comment = "// It is included by %s_cmd_decoder.cc\n" % _lower_prefix
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.functions:
func.WriteServiceImplementation(f)
if self.capability_flags and _prefix == 'GLES2':
f.write("""
bool GLES2DecoderImpl::SetCapabilityState(GLenum cap, bool enabled) {
switch (cap) {
""")
for capability in self.capability_flags:
f.write(" case GL_%s:\n" % capability['name'].upper())
if 'on_change' in capability:
f.write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
%(on_change)s
}
return false;
""" % capability)
else:
f.write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
state_.enable_flags.cached_%(name)s = enabled;
return true;
}
return false;
""" % capability)
f.write(""" default:
NOTREACHED();
return false;
}
}
""")
self.generated_cpp_filenames.append(filename)
def WritePassthroughServiceImplementation(self, filename):
"""Writes the passthrough service decoder implementation."""
with CWriter(filename, self.year) as f:
header = """
#include \"gpu/command_buffer/service/gles2_cmd_decoder_passthrough.h\"
namespace gpu {
namespace gles2 {
""";
f.write(header);
for func in self.functions:
func.WritePassthroughServiceImplementation(f)
footer = """
} // namespace gles2
} // namespace gpu
""";
f.write(footer);
self.generated_cpp_filenames.append(filename)
def WriteServiceUnitTests(self, filename_pattern):
"""Writes the service decoder unit tests."""
num_tests = len(self.functions)
FUNCTIONS_PER_FILE = 98 # hard code this so it doesn't change.
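# Chunking sketch (hypothetical numbers): 300 functions with 98 per file and
# a filename_pattern of 'unittest_%d.cc' produce files 1..4; keeping the
# constant fixed pins each function to the same numbered file across runs.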
count = 0
for test_num in range(0, num_tests, FUNCTIONS_PER_FILE):
count += 1
filename = filename_pattern % count
comment = "// It is included by %s_cmd_decoder_unittest_%d.cc\n" \
% (_lower_prefix, count)
with CHeaderWriter(filename, self.year, comment) as f:
end = test_num + FUNCTIONS_PER_FILE
if end > num_tests:
end = num_tests
for idx in range(test_num, end):
func = self.functions[idx]
test_name = '%sDecoderTest%d' % (_prefix, count)
if func.IsES3():
test_name = 'GLES3DecoderTest%d' % count
# Do any filtering of the functions here, so that the functions
# will not move between the numbered files if filtering properties
# are changed.
if func.GetInfo('extension_flag'):
continue
if func.GetInfo('unit_test') != False:
func.WriteServiceUnitTest(f, {
'test_name': test_name
})
self.generated_cpp_filenames.append(filename)
def WriteServiceContextStateTestHelpers(self, filename):
comment = "// It is included by context_state_test_helpers.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
if self.capability_flags:
f.write(
"""void ContextStateTestHelpers::SetupInitCapabilitiesExpectations(
MockGL* gl,
gles2::FeatureInfo* feature_info) {
""")
for capability in self.capability_flags:
capability_no_init = 'no_init' in capability and \
capability['no_init'] == True
if capability_no_init:
continue
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3:
continue
if 'extension_flag' in capability:
f.write(" if (feature_info->feature_flags().%s) {\n" %
capability['extension_flag'])
f.write(" ")
f.write(" ExpectEnableDisable(gl, GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
if 'extension_flag' in capability:
f.write(" }")
f.write(" if (feature_info->IsES3Capable()) {")
for capability in self.capability_flags:
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3:
f.write(" ExpectEnableDisable(gl, GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
f.write(""" }
}
""")
f.write("""
void ContextStateTestHelpers::SetupInitStateExpectations(
MockGL* gl,
gles2::FeatureInfo* feature_info,
const gfx::Size& initial_size) {
""")
# We need to sort the keys so the expectations match
for state_name in sorted(_STATE_INFO.keys()):
state = _STATE_INFO[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2,
state['states'])):
args = []
for item in group:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
f.write(
" EXPECT_CALL(*gl, %s(%s, %s))\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx],
", ".join(args)))
f.write(" .Times(1)\n")
f.write(" .RetiresOnSaturation();\n")
elif state['type'] == 'NamedParameter':
for item in state['states']:
expect_value = item['default']
if isinstance(expect_value, list):
# TODO: Currently we do not check array values.
expect_value = "_"
operation = []
operation.append(
" EXPECT_CALL(*gl, %s(%s, %s))\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
expect_value))
operation.append(" .Times(1)\n")
operation.append(" .RetiresOnSaturation();\n")
guarded_operation = GuardState(item, ''.join(operation),
"feature_info")
f.write(guarded_operation)
elif 'no_init' not in state:
if 'extension_flag' in state:
f.write(" if (feature_info->feature_flags().%s) {\n" %
state['extension_flag'])
f.write(" ")
args = []
for item in state['states']:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
# TODO: Currently we do not check array values.
args = ["_" if isinstance(arg, list) else arg for arg in args]
if 'custom_function' in state:
f.write(" SetupInitStateManualExpectationsFor%s(gl, %s);\n" %
(state['func'], ", ".join(args)))
else:
f.write(" EXPECT_CALL(*gl, %s(%s))\n" %
(state['func'], ", ".join(args)))
f.write(" .Times(1)\n")
f.write(" .RetiresOnSaturation();\n")
if 'extension_flag' in state:
f.write(" }\n")
f.write(" SetupInitStateManualExpectations(gl, feature_info);\n")
f.write("}\n")
self.generated_cpp_filenames.append(filename)
def WriteServiceUnitTestsForExtensions(self, filename):
"""Writes the service decoder unit tests for functions with extension_flag.
The functions are special in that they need a specific unit test
base class to turn on the extension.
"""
functions = [f for f in self.functions if f.GetInfo('extension_flag')]
comment = "// It is included by gles2_cmd_decoder_unittest_extensions.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in functions:
if func.GetInfo('unit_test') != False:
extension = ToCamelCase(
ToGLExtensionString(func.GetInfo('extension_flag')))
test_name = 'GLES2DecoderTestWith%s' % extension
if func.IsES3():
test_name = 'GLES3DecoderTestWith%s' % extension
func.WriteServiceUnitTest(f, {
'test_name': test_name
})
self.generated_cpp_filenames.append(filename)
def WriteGLES2Header(self, filename):
"""Writes the GLES2 header."""
comment = "// This file contains Chromium-specific GLES2 declarations.\n\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2Header(f)
f.write("\n")
self.generated_cpp_filenames.append(filename)
def WriteGLES2CLibImplementation(self, filename):
"""Writes the GLES2 c lib implementation."""
comment = "// These functions emulate GLES2 over command buffers.\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2CLibImplementation(f)
f.write("""
namespace gles2 {
extern const NameToFunc g_gles2_function_table[] = {
""")
for func in self.original_functions:
f.write(
' { "gl%s", reinterpret_cast<GLES2FunctionPointer>(gl%s), },\n' %
(func.name, func.name))
f.write(""" { nullptr, nullptr, },
};
} // namespace gles2
""")
self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceHeader(self, filename):
"""Writes the GLES2 interface header."""
comment = ("// This file is included by %s_interface.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceStub(self, filename):
"""Writes the GLES2 interface stub header."""
comment = "// This file is included by gles2_interface_stub.h.\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceStub(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceStubImpl(self, filename):
"""Writes the GLES2 interface header."""
comment = "// This file is included by gles2_interface_stub.cc.\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceStubImpl(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2ImplementationHeader(self, filename):
"""Writes the GLES2 Implementation header."""
comment = \
("// This file is included by %s_implementation.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2ImplementationHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2Implementation(self, filename):
"""Writes the GLES2 Implementation."""
comment = \
("// This file is included by %s_implementation.cc to define the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2Implementation(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2TraceImplementationHeader(self, filename):
"""Writes the GLES2 Trace Implementation header."""
comment = "// This file is included by gles2_trace_implementation.h\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2TraceImplementationHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2TraceImplementation(self, filename):
"""Writes the GLES2 Trace Implementation."""
comment = "// This file is included by gles2_trace_implementation.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2TraceImplementation(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2ImplementationUnitTests(self, filename):
"""Writes the GLES2 helper header."""
comment = \
("// This file is included by %s_implementation.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2ImplementationUnitTest(f)
self.generated_cpp_filenames.append(filename)
def WriteServiceUtilsHeader(self, filename):
"""Writes the gles2 auto generated utility header."""
with CHeaderWriter(filename, self.year) as f:
for name in sorted(self.named_type_info.keys()):
named_type = NamedType(self.named_type_info[name])
if not named_type.CreateValidator():
continue
class_name = ValidatorClassName(name)
if named_type.IsComplete():
f.write("""class %(class_name)s {
public:
bool IsValid(const %(type)s value) const;"""% {
'class_name': class_name,
'type': named_type.GetType()
})
if named_type.HasES3Values():
f.write("""%s();
void SetIsES3(bool is_es3) { is_es3_ = is_es3; }
private:
bool is_es3_;""" % class_name)
f.write("};\n")
f.write("%s %s;\n\n" %
(class_name, ToUnderscore(name)))
else:
f.write("ValueValidator<%s> %s;\n" %
(named_type.GetType(), ToUnderscore(name)))
f.write("\n")
self.generated_cpp_filenames.append(filename)
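# Sketch of the emitted declarations (hypothetical complete GLenum type named
# "TextureTarget", assuming ValidatorClassName() appends "Validator"):
#   class TextureTargetValidator {
#    public:
#     bool IsValid(const GLenum value) const;
#   };
#   TextureTargetValidator texture_target;
# Incomplete named types instead get a ValueValidator<GLenum> member.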
def WriteServiceUtilsImplementation(self, filename):
"""Writes the gles2 auto generated utility implementation."""
with CHeaderWriter(filename, self.year) as f:
names = sorted(self.named_type_info.keys())
for name in names:
named_type = NamedType(self.named_type_info[name])
class_name = ValidatorClassName(name)
if not named_type.CreateValidator():
continue
if named_type.IsComplete():
if named_type.HasES3Values():
f.write("""Validators::%(class_name)s::%(class_name)s()
: is_es3_(false) {}""" % { 'class_name': class_name })
f.write("""bool Validators::%(class_name)s::IsValid(
const %(type)s value) const {
switch(value) {\n""" % {
'class_name': class_name,
'type': named_type.GetType()
})
if named_type.GetValidValues():
for value in named_type.GetValidValues():
f.write("case %s:\n" % value)
f.write("return true;\n")
if named_type.GetValidValuesES3():
for value in named_type.GetValidValuesES3():
f.write("case %s:\n" % value)
f.write("return is_es3_;\n")
if named_type.GetDeprecatedValuesES3():
for value in named_type.GetDeprecatedValuesES3():
f.write("case %s:\n" % value)
f.write("return !is_es3_;\n")
f.write("}\nreturn false;\n}\n")
f.write("\n")
else:
if named_type.GetValidValues():
f.write("static const %s valid_%s_table[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValues():
f.write(" %s,\n" % value)
f.write("};\n")
f.write("\n")
if named_type.GetValidValuesES3():
f.write("static const %s valid_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValuesES3():
f.write(" %s,\n" % value)
f.write("};\n")
f.write("\n")
if named_type.GetDeprecatedValuesES3():
f.write("static const %s deprecated_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetDeprecatedValuesES3():
f.write(" %s,\n" % value)
f.write("};\n")
f.write("\n")
f.write("Validators::Validators()")
pre = ' : '
for name in names:
named_type = NamedType(self.named_type_info[name])
if not named_type.CreateValidator() or named_type.IsComplete():
continue
if named_type.GetValidValues():
code = """%(pre)s%(name)s(
valid_%(name)s_table, base::size(valid_%(name)s_table))"""
else:
code = "%(pre)s%(name)s()"
f.write(code % {
'name': ToUnderscore(name),
'pre': pre,
})
pre = ',\n '
f.write(" {\n");
f.write("}\n\n");
if _prefix == 'GLES2':
f.write("void Validators::UpdateValuesES3() {\n")
for name in names:
named_type = NamedType(self.named_type_info[name])
if not named_type.IsConstant() and named_type.IsComplete():
if named_type.HasES3Values():
f.write(" %(name)s.SetIsES3(true);" % {
'name': ToUnderscore(name),
})
continue
if named_type.GetDeprecatedValuesES3():
code = """ %(name)s.RemoveValues(
deprecated_%(name)s_table_es3, base::size(deprecated_%(name)s_table_es3));
"""
f.write(code % {
'name': ToUnderscore(name),
})
if named_type.GetValidValuesES3():
code = """ %(name)s.AddValues(
valid_%(name)s_table_es3, base::size(valid_%(name)s_table_es3));
"""
f.write(code % {
'name': ToUnderscore(name),
})
f.write("}\n\n");
f.write("void Validators::UpdateETCCompressedTextureFormats() {\n")
for name in ['CompressedTextureFormat', 'TextureInternalFormatStorage']:
for fmt in _ETC_COMPRESSED_TEXTURE_FORMATS:
code = """ %(name)s.AddValue(%(format)s);
"""
f.write(code % {
'name': ToUnderscore(name),
'format': fmt,
})
f.write("}\n\n");
self.generated_cpp_filenames.append(filename)
def WriteCommonUtilsHeader(self, filename):
"""Writes the gles2 common utility header."""
with CHeaderWriter(filename, self.year) as f:
type_infos = sorted(self.named_type_info.keys())
for type_info in type_infos:
if self.named_type_info[type_info]['type'] == 'GLenum':
f.write("static std::string GetString%s(uint32_t value);\n" %
type_info)
f.write("\n")
self.generated_cpp_filenames.append(filename)
def WriteCommonUtilsImpl(self, filename):
"""Writes the gles2 common utility header."""
enum_re = re.compile(r'\#define\s+(GL_[a-zA-Z0-9_]+)\s+([0-9A-Fa-fx]+)')
define_dict = {}
for fname in ['third_party/khronos/GLES2/gl2.h',
'third_party/khronos/GLES2/gl2ext.h',
'third_party/khronos/GLES3/gl3.h',
'third_party/khronos/GLES3/gl31.h',
'gpu/GLES2/gl2chromium.h',
'gpu/GLES2/gl2extchromium.h']:
fname = os.path.join(self.chromium_root_dir, fname)
lines = open(fname).readlines()
for line in lines:
m = enum_re.match(line)
if m:
name = m.group(1)
value = m.group(2)
if len(value) <= 10 and value.startswith('0x'):
if not value in define_dict:
define_dict[value] = name
# check whether our own _CHROMIUM macros conflict with the Khronos GL headers.
elif EnumsConflict(define_dict[value], name):
self.Error("code collision: %s and %s have the same code %s" %
(define_dict[value], name, value))
with CHeaderWriter(filename, self.year) as f:
f.write("static const %sUtil::EnumToString "
"enum_to_string_table[] = {\n" % _prefix)
for value in sorted(define_dict):
f.write(' { %s, "%s", },\n' % (value, define_dict[value]))
f.write("""};
const %(p)sUtil::EnumToString* const %(p)sUtil::enum_to_string_table_ =
enum_to_string_table;
const size_t %(p)sUtil::enum_to_string_table_len_ =
sizeof(enum_to_string_table) / sizeof(enum_to_string_table[0]);
""" % { 'p' : _prefix})
enums = sorted(self.named_type_info.keys())
for enum in enums:
if self.named_type_info[enum]['type'] == 'GLenum':
f.write("std::string %sUtil::GetString%s(uint32_t value) {\n" %
(_prefix, enum))
valid_list = self.named_type_info[enum]['valid']
if 'valid_es3' in self.named_type_info[enum]:
for es3_enum in self.named_type_info[enum]['valid_es3']:
if not es3_enum in valid_list:
valid_list.append(es3_enum)
assert len(valid_list) == len(set(valid_list))
if len(valid_list) > 0:
f.write(" static const EnumToString string_table[] = {\n")
for value in valid_list:
f.write(' { %s, "%s" },\n' % (value, value))
f.write(""" };
return %sUtil::GetQualifiedEnumString(
string_table, base::size(string_table), value);
}
""" % _prefix)
else:
f.write(""" return %sUtil::GetQualifiedEnumString(
nullptr, 0, value);
}
""" % _prefix)
self.generated_cpp_filenames.append(filename)
def WritePepperGLES2Interface(self, filename, dev):
"""Writes the Pepper OpenGLES interface definition."""
with CWriter(filename, self.year) as f:
f.write("label Chrome {\n")
f.write(" M39 = 1.0\n")
f.write("};\n\n")
if not dev:
# Declare GL types.
f.write("[version=1.0]\n")
f.write("describe {\n")
for gltype in ['GLbitfield', 'GLboolean', 'GLbyte', 'GLclampf',
'GLclampx', 'GLenum', 'GLfixed', 'GLfloat', 'GLint',
'GLintptr', 'GLshort', 'GLsizei', 'GLsizeiptr',
'GLubyte', 'GLuint', 'GLushort']:
f.write(" %s;\n" % gltype)
f.write(" %s_ptr_t;\n" % gltype)
f.write("};\n\n")
# C level typedefs.
f.write("#inline c\n")
f.write("#include \"ppapi/c/pp_resource.h\"\n")
if dev:
f.write("#include \"ppapi/c/ppb_opengles2.h\"\n\n")
else:
f.write("\n#ifndef __gl2_h_\n")
for (k, v) in _GL_TYPES.iteritems():
f.write("typedef %s %s;\n" % (v, k))
f.write("#ifdef _WIN64\n")
for (k, v) in _GL_TYPES_64.iteritems():
f.write("typedef %s %s;\n" % (v, k))
f.write("#else\n")
for (k, v) in _GL_TYPES_32.iteritems():
f.write("typedef %s %s;\n" % (v, k))
f.write("#endif // _WIN64\n")
f.write("#endif // __gl2_h_\n\n")
f.write("#endinl\n")
for interface in self.pepper_interfaces:
if interface.dev != dev:
continue
# Historically, we have provided the OpenGLES2 interfaces inside a struct
# namespace. To avoid breaking code that uses the interface as
# "struct OpenGLES2", we keep emitting it in the struct namespace.
f.write('\n[macro="%s", force_struct_namespace]\n' %
interface.GetInterfaceName())
f.write("interface %s {\n" % interface.GetStructName())
for func in self.original_functions:
if not func.InPepperInterface(interface):
continue
ret_type = func.MapCTypeToPepperIdlType(func.return_type,
is_for_return_type=True)
func_prefix = " %s %s(" % (ret_type, func.GetPepperName())
f.write(func_prefix)
f.write("[in] PP_Resource context")
for arg in func.MakeTypedPepperIdlArgStrings():
f.write(",\n" + " " * len(func_prefix) + arg)
f.write(");\n")
f.write("};\n\n")
def WritePepperGLES2Implementation(self, filename):
"""Writes the Pepper OpenGLES interface implementation."""
with CWriter(filename, self.year) as f:
f.write("#include \"ppapi/shared_impl/ppb_opengles2_shared.h\"\n\n")
f.write("#include \"base/logging.h\"\n")
f.write("#include \"gpu/command_buffer/client/gles2_implementation.h\"\n")
f.write("#include \"ppapi/shared_impl/ppb_graphics_3d_shared.h\"\n")
f.write("#include \"ppapi/thunk/enter.h\"\n\n")
f.write("namespace ppapi {\n\n")
f.write("namespace {\n\n")
f.write("typedef thunk::EnterResource<thunk::PPB_Graphics3D_API>"
" Enter3D;\n\n")
f.write("gpu::gles2::GLES2Implementation* ToGles2Impl(Enter3D*"
" enter) {\n")
f.write(" DCHECK(enter);\n")
f.write(" DCHECK(enter->succeeded());\n")
f.write(" return static_cast<PPB_Graphics3D_Shared*>(enter->object())->"
"gles2_impl();\n");
f.write("}\n\n");
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
original_arg = func.MakeTypedPepperArgString("")
context_arg = "PP_Resource context_id"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
f.write("%s %s(%s) {\n" %
(func.return_type, func.GetPepperName(), arg))
f.write(" Enter3D enter(context_id, true);\n")
f.write(" if (enter.succeeded()) {\n")
return_str = "" if func.return_type == "void" else "return "
f.write(" %sToGles2Impl(&enter)->%s(%s);\n" %
(return_str, func.original_name,
func.MakeOriginalArgString("")))
f.write(" }")
if func.return_type == "void":
f.write("\n")
else:
f.write(" else {\n")
f.write(" return %s;\n" % func.GetErrorReturnString())
f.write(" }\n")
f.write("}\n\n")
f.write("} // namespace\n")
for interface in self.pepper_interfaces:
f.write("const %s* PPB_OpenGLES2_Shared::Get%sInterface() {\n" %
(interface.GetStructName(), interface.GetName()))
f.write(" static const struct %s "
"ppb_opengles2 = {\n" % interface.GetStructName())
f.write(" &")
f.write(",\n &".join(
f.GetPepperName() for f in self.original_functions
if f.InPepperInterface(interface)))
f.write("\n")
f.write(" };\n")
f.write(" return &ppb_opengles2;\n")
f.write("}\n")
f.write("} // namespace ppapi\n")
self.generated_cpp_filenames.append(filename)
def WriteGLES2ToPPAPIBridge(self, filename):
"""Connects GLES2 helper library to PPB_OpenGLES2 interface"""
with CWriter(filename, self.year) as f:
f.write("#ifndef GL_GLEXT_PROTOTYPES\n")
f.write("#define GL_GLEXT_PROTOTYPES\n")
f.write("#endif\n")
f.write("#include <GLES2/gl2.h>\n")
f.write("#include <GLES2/gl2ext.h>\n")
f.write("#include \"ppapi/lib/gl/gles2/gl2ext_ppapi.h\"\n\n")
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
interface = self.interface_info[func.GetInfo('pepper_interface') or '']
f.write("%s GL_APIENTRY gl%s(%s) {\n" %
(func.return_type, func.GetPepperName(),
func.MakeTypedPepperArgString("")))
return_str = "" if func.return_type == "void" else "return "
interface_str = "glGet%sInterfacePPAPI()" % interface.GetName()
original_arg = func.MakeOriginalArgString("")
context_arg = "glGetCurrentContextPPAPI()"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
if interface.GetName():
f.write(" const struct %s* ext = %s;\n" %
(interface.GetStructName(), interface_str))
f.write(" if (ext)\n")
f.write(" %sext->%s(%s);\n" %
(return_str, func.GetPepperName(), arg))
if return_str:
f.write(" %s0;\n" % return_str)
else:
f.write(" %s%s->%s(%s);\n" %
(return_str, interface_str, func.GetPepperName(), arg))
f.write("}\n\n")
self.generated_cpp_filenames.append(filename)
def Format(generated_files, output_dir, chromium_root_dir):
"""Format generated_files relative to output_dir using clang-format."""
formatter = "third_party/depot_tools/clang-format"
if platform.system() == "Windows":
formatter = "third_party\\depot_tools\\clang-format.bat"
formatter = os.path.join(chromium_root_dir, formatter)
generated_files = map(lambda filename: os.path.join(output_dir, filename),
generated_files)
for filename in generated_files:
call([formatter, "-i", "-style=chromium", filename], cwd=chromium_root_dir)
| bsd-3-clause | -5,548,171,058,463,492,000 | 33.578348 | 80 | 0.592681 | false | 3.373271 | true | false | false |
aewallin/openvoronoi | python_examples/arc/arc_2_draw_glyph.py | 1 | 5012 | # This script only draws a glyph, no VD is calculated
# truetype-tracer also outputs arcs, and we plot them orange
import truetypetracer as ttt # https://github.com/aewallin/truetype-tracer
import openvoronoi as ovd # https://github.com/aewallin/openvoronoi
import ovdvtk
import time
import vtk
def drawLine(myscreen, previous, p, loopColor):
myscreen.addActor(ovdvtk.Line(p1=(previous[0], previous[1], 0), p2=(p[0], p[1], 0), color=loopColor))
def drawSeg(myscreen, previous, p):
ovdvtk.drawVertex(myscreen, ovd.Point(p[0], p[1]), 0.0001, ovdvtk.red)
if (p[2] == -1): # a line-segment
drawLine(myscreen, previous, p, ovdvtk.yellow)
else: # an arc
prev = ovd.Point(previous[0], previous[1])
target = ovd.Point(p[0], p[1])
radius = p[2]
cw = p[3]
center = ovd.Point(p[4], p[5])
# print "prev ",prev
# print "center ",center
# print "diff ",prev-center
# print "p ",p
ovdvtk.drawArc(myscreen, prev, target, radius, center, cw, ovdvtk.orange)
# drawArc(myscreen, pt1, pt2, r, cen, cw, arcColor, da=0.1)
# r, cen, cw, arcColor, da=0.1)
def drawLoops(myscreen, loops, loopColor):
# draw the loops
nloop = 0
for lop in loops:
n = 0
N = len(lop)
first_point = []
previous = []
n_lines = 0
n_arcs = 0
for p in lop:
# p = [x, y, r, cw, cx, cy]
if n == 0: # don't draw anything on the first iteration
previous = p
first_point = p
elif n == (N - 1): # the last point
drawSeg(myscreen, previous, p)
if p[2] == -1:
n_lines += 1
else:
n_arcs += 1
drawSeg(myscreen, p, first_point)
if first_point[2] == -1:
n_lines += 1
else:
n_arcs += 1
else: # normal segment
drawSeg(myscreen, previous, p)
if p[2] == -1:
n_lines += 1
else:
n_arcs += 1
previous = p
n = n + 1
print "rendered loop ", nloop, " with ", len(lop), " points"
print " n_lines = ", n_lines
print " n_arcs = ", n_arcs
nloop = nloop + 1
def translate(segs, x, y):
out = []
for seg in segs:
seg2 = []
for p in seg:
p2 = p
p2[0] += x
p2[1] += y
p2[4] += x
p2[5] += y
seg2.append(p2)
# seg2.append(seg[3] + y)
out.append(seg2)
return out
def modify_segments(segs):
segs_mod = []
for seg in segs:
first = seg[0]
last = seg[len(seg) - 1]
assert (first[0] == last[0] and first[1] == last[1])
seg.pop()
segs_mod.append(seg)
return segs_mod
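# Note on modify_segments() (an observed invariant, not documented here):
# truetype-tracer repeats each loop's first point as its last point; the
# assert checks this and pop() drops the duplicate, because drawLoops()
# closes every loop itself by drawing back to first_point.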
def draw_ttt(myscreen, text, x, y, scale):
wr = ttt.SEG_Writer()
# wr.arc = False
wr.arc = True
# wr.conic = False
# wr.cubic = False
wr.scale = float(1) / float(scale)
# "L" has 36 points by default
wr.conic_biarc_subdivision = 200
wr.conic_line_subdivision = 50 # this increases the number of points to 366
# wr.cubic_biarc_subdivision = 10 # no effect?
# wr.cubic_line_subdivision = 10 # no effect?
wr.setFont(0)
s3 = ttt.ttt(text, wr)
ext = wr.extents
dx = ext.maxx - ext.minx
segs = wr.get_segments()
segs = translate(segs, x, y)
print "number of polygons: ", len(segs)
np = 0
sum_pts = 0
segs = modify_segments(segs)
for s in segs:
sum_pts += len(s)
print " polygon ", np, " has ", len(s), " points"
np = np + 1
print "total points: ", sum_pts
drawLoops(myscreen, segs, ovdvtk.yellow)
# this script only draws geometry from ttt
# no voronoi-diagram is created!
if __name__ == "__main__":
print "ttt version = ", ttt.version()
# w=2500
# h=1500
# w=1920
# h=1080
# w=1024
# h=1024
w = 800
h = 600
myscreen = ovdvtk.VTKScreen(width=w, height=h)
ovdvtk.drawOCLtext(myscreen, rev_text=ovd.version())
scale = 1
far = 1
camPos = far
zmult = 3
myscreen.camera.SetPosition(0, -camPos / float(1000), zmult * camPos)
myscreen.camera.SetClippingRange(-(zmult + 1) * camPos, (zmult + 1) * camPos)
myscreen.camera.SetFocalPoint(0.0, 0, 0)
# draw a unit-circle
ca = ovdvtk.Circle(center=(0, 0, 0), radius=1, color=(0, 1, 1), resolution=50)
myscreen.addActor(ca)
# draw_ttt(myscreen, "R", 0,0,10000)
draw_ttt(myscreen, "ABCDEFGHIJKLMNOPQRSTUVWXYZ", -0.5, 0, 80000)
# draw_ttt(myscreen, "abcdefghijklmnopqrstuvwxyz", -0.5,-0.1,80000)
# draw_ttt(myscreen, "1234567890*", -0.5,-0.2,80000)
# draw_ttt(myscreen, "m", -0.5,-0.2,80000)
print "PYTHON All DONE."
myscreen.render()
myscreen.iren.Start()
| lgpl-2.1 | 7,714,759,631,097,631,000 | 27.971098 | 105 | 0.528132 | false | 3.050517 | false | false | false |
taikoa/wevolver-server | wevolve/home/migrations/0001_initial.py | 1 | 4539 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Category'
db.create_table(u'category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=150)),
('created_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='category_created_user', to=orm['users.User'])),
('created', self.gf('django.db.models.fields.DateTimeField')()),
('modified_user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='category_modified_user', null=True, to=orm['users.User'])),
('modified', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
))
db.send_create_signal('home', ['Category'])
# Adding model 'Country'
db.create_table(u'countries', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=150)),
))
db.send_create_signal('home', ['Country'])
def backwards(self, orm):
# Deleting model 'Category'
db.delete_table(u'category')
# Deleting model 'Country'
db.delete_table(u'countries')
models = {
'home.category': {
'Meta': {'object_name': 'Category', 'db_table': "u'category'"},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'category_created_user'", 'to': "orm['users.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'category_modified_user'", 'null': 'True', 'to': "orm['users.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
},
'home.country': {
'Meta': {'object_name': 'Country', 'db_table': "u'countries'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
},
'users.user': {
'Meta': {'object_name': 'User', 'db_table': "u'user'"},
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'country_id'"}),
'data': ('django.db.models.fields.TextField', [], {}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interests': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.User']", 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'picture_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'picture_original_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'skills': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['home']
| agpl-3.0 | -3,417,519,280,203,244,000 | 58.723684 | 186 | 0.561577 | false | 3.639936 | false | false | false |
FA810/My_Codes | [Python] championship simulation/calendar.py | 1 | 1427 | '''
Author: Fabio Rizzello
Functions to create double round-robin fixtures (every team meets every other team home and away) for a provided list of teams.
'''
ghost_team = "___"
def create_fixtures(teams_list):
    """Build a double round-robin schedule using the circle method.
    Returns a list of rounds; each round is a list of [home, away] pairs.
    With an odd number of teams a ghost team is added, so a match against
    ghost_team represents a bye."""
    if not teams_list:
        return []
    rounds = []
    teams = list(teams_list)  # work on a copy so the caller's list is not mutated
    teams_number = len(teams)
    # with an odd number of teams, pad with the ghost team to make pairing possible
    if teams_number % 2 == 1:
        teams.append(ghost_team)
        teams_number = len(teams)
    # a double round-robin needs 2 * (n - 1) rounds of n / 2 matches each
    total_rounds = (teams_number - 1) * 2
    matches_per_round = teams_number // 2
    # circle method: the last team stays fixed, all the others rotate each round
    for single_round in range(0, total_rounds):
        day = []
        for mat in range(0, matches_per_round):
            home = (single_round + mat) % (teams_number - 1)
            away = (teams_number - 1 - mat + single_round) % (teams_number - 1)
            if mat == 0:
                away = teams_number - 1
            # alternate home and away between rounds so each pairing plays both ways
            if single_round % 2 == 0:
                day.append([teams[home], teams[away]])
            else:
                day.append([teams[away], teams[home]])
        rounds.append(day)
    return rounds
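# A worked example (added for illustration, computed from the function above):
# create_fixtures(['A', 'B', 'C', 'D']) yields six rounds of two matches each,
# the first round being [['A', 'D'], ['B', 'C']]; across all six rounds every
# pairing appears exactly once at home and once away.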
def print_matches(rounds):
    """Print each match on one line (home team then away team), with a blank line after every round."""
    for single_round in rounds:
        for match in single_round:
            for team in match:
                print team,
            print
        print
def try_module():
    """Smoke test: build and print a full schedule for four teams."""
    rounds = create_fixtures(['Napoli', 'Parma', 'Juventus', 'Roma'])
    print_matches(rounds)
#print_matches(create_fixtures(['Napoli', 'Parma', 'Juventus', 'Roma', "cagat"]))
#try_module()
| apache-2.0 | -3,161,538,729,572,276,000 | 25.425926 | 81 | 0.644709 | false | 2.859719 | false | false | false |
BrainComputationLab/ncs | python/samples/models/test/input_test.py | 1 | 1161 | #!/usr/bin/python
import math
import sys
ncs_lib_path = '../../../../python/'  # relative path to the ncs package
sys.path.append(ncs_lib_path)
import ncs
def run(argv):
sim = ncs.Simulation()
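    # Izhikevich-model parameters (a, b, c, d) plus initial recovery (u) and
    # membrane potential (v); per the cell's name, these values are intended
    # to produce bursting behaviour.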
bursting_parameters = sim.addNeuron("bursting","izhikevich",
{
"a": 0.02,
"b": 0.3,
"c": -50.0,
"d": 4.0,
"u": -12.0,
"v": -65.0,
"threshold": 30,
})
    group_1 = sim.addNeuronGroup("group_1", 2, bursting_parameters, None)
if not sim.init(argv):
print "failed to initialize simulation."
return
sim.addStimulus("sine_current",
{
"amplitude_scale":10,
"time_scale": 200.0 / math.pi,
"phase": 0.0,
"amplitude_shift":10
},
group_1,
1,
0.01,
1.0)
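    # Assumption based on the parameter names (not verified against the NCS
    # documentation): the stimulus presumably follows
    # amplitude_scale * sin(t / time_scale + phase) + amplitude_shift.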
# current_report=sim.addReport("group_1","neuron","synaptic_current",1.0)
# current_report.toStdOut()
    voltage_report = sim.addReport("group_1", "neuron", "input_current", 1.0, 0.0, 1.0).toStdOut()
#voltage_report.toAsciiFile("./bursting_izh.txt")
sim.run(duration=1.0)
return
if __name__ == "__main__":
run(sys.argv)
| bsd-2-clause | -4,768,359,869,967,613,000 | 24.23913 | 88 | 0.523686 | false | 2.931818 | false | false | false |
vrpolak/slowsort | default_impl_mutable_binary_tree_laden_node.py | 1 | 1645 | """Module that provides the default implementation of MutableBinaryTreeLadenNode.
TODO: Delete as no search here uses this."""
from mutable_binary_tree_laden_node import MutableBinaryTreeLadenNode
class DefaultImplMutableBinaryTreeLadenNode(MutableBinaryTreeLadenNode):
"""Node of binary tree, carrying a payload object.
Left and right children are either None or also Node.
Self is altered regularily to avoid excessive object creation."""
def __init__(self, payload):
"""Initialize an childless node."""
self.payload = payload
self.left_child = None
self.right_child = None
def get_payload(self):
"""Return the payload, do not change state."""
return self.payload
def get_left_child(self):
"""Return left child or None, do not change state."""
return self.left_child
def get_right_child(self):
"""Return right child or None, do not change state."""
return self.right_child
def swap_payload(self, payload):
"""Set the new payload, return the old payload."""
        old_payload = self.payload
        self.payload = payload
        return old_payload
def swap_left_child(self, node):
"""Set node (may be None) as new left child, return the previous left child."""
old_left_child = self.left_child
self.left_child = node
        return old_left_child
def swap_right_child(self, node):
"""Set node (may be None) as new right child, return the previous right child."""
old_right_child = self.right_child
self.right_child = node
        return old_right_child
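# Minimal usage sketch (added for illustration, not part of the original
# module); it only runs when the file is executed directly.
if __name__ == "__main__":
    root = DefaultImplMutableBinaryTreeLadenNode("root")
    root.swap_left_child(DefaultImplMutableBinaryTreeLadenNode("left"))
    root.swap_right_child(DefaultImplMutableBinaryTreeLadenNode("right"))
    assert root.swap_payload("new root") == "root"
    assert root.get_payload() == "new root"
    assert root.get_left_child().get_payload() == "left"
    assert root.get_right_child().get_payload() == "right"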
| agpl-3.0 | 124,697,273,611,737,150 | 33.270833 | 89 | 0.658359 | false | 4.196429 | false | false | false |
texastribune/scuole | config/urls.py | 1 | 3284 | # -*- coding: utf-8 -*-
"""
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/stable/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.urls import include, path, re_path
from django.views import defaults as default_views
from scuole.campuses.sitemaps import CampusSitemap
from scuole.core.sitemaps import StaticSitemap
from scuole.core.views import (
AboutView,
AcceptRedirectView,
LandingView,
LookupView,
SearchView,
)
from scuole.districts.sitemaps import DistrictSitemap
from scuole.states.sitemaps import StateSitemap
from scuole.cohorts.sitemaps_county import CountyCohortSitemap
from scuole.cohorts.sitemaps_region import RegionCohortSitemap
from scuole.cohorts.sitemaps_state import StateCohortSitemap
sitemaps = {
"scuole.campuses": CampusSitemap,
"scuole.districts": DistrictSitemap,
"scuole.states": StateSitemap,
"scuole.cohorts_counties": CountyCohortSitemap,
"scuole.cohorts_regions": RegionCohortSitemap,
"scuole.cohorts_states": StateCohortSitemap,
"scuole.core": StaticSitemap,
}
def trigger_error(request):
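    # Deliberately raise ZeroDivisionError so Sentry's error reporting can be
    # verified end to end (see the "sentry-debug/" URL below).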
division_by_zero = 1 / 0
urlpatterns = [
    path("sentry-debug/", trigger_error),
path("", LandingView.as_view(), name="landing"),
path("outcomes/", include("scuole.cohorts.urls", namespace="cohorts")),
path("districts/", include("scuole.districts.urls", namespace="districts")),
path("states/", include("scuole.states.urls", namespace="states")),
path("search/", SearchView.as_view(), name="search"),
path("lookup/", LookupView.as_view(), name="lookup"),
path("about/", AboutView.as_view(), name="about"),
path("redirect/", AcceptRedirectView.as_view(), name="redirect"),
path("admin/", admin.site.urls),
path(
"sitemap.xml",
sitemap,
{"sitemaps": sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
]
# When DEBUG is True, expose the error pages directly so they can be previewed;
# they are normally only rendered when DEBUG is False.
if settings.DEBUG:
urlpatterns += [
path(
"400/", default_views.bad_request, {"exception": Exception("Bad request")}
),
path(
"403/",
default_views.permission_denied,
{"exception": Exception("Permission denied")},
),
path(
"404/",
default_views.page_not_found,
{"exception": Exception("Page not found")},
),
path("500/", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
re_path(r"^__debug__/", include(debug_toolbar.urls))
] + urlpatterns
| mit | 7,513,251,334,424,165,000 | 33.568421 | 86 | 0.670524 | false | 3.632743 | false | false | false |